##// END OF EJS Templates
dirstate: drop the deprecated `merge` method...
marmoute -
r48725:8e46934c default
parent child Browse files
Show More
@@ -1,73 +1,72 b''
1 # dirstatenonnormalcheck.py - extension to check the consistency of the
1 # dirstatenonnormalcheck.py - extension to check the consistency of the
2 # dirstate's non-normal map
2 # dirstate's non-normal map
3 #
3 #
4 # For most operations on dirstate, this extension checks that the nonnormalset
4 # For most operations on dirstate, this extension checks that the nonnormalset
5 # contains the right entries.
5 # contains the right entries.
6 # It compares the nonnormal file to a nonnormalset built from the map of all
6 # It compares the nonnormal file to a nonnormalset built from the map of all
7 # the files in the dirstate to check that they contain the same files.
7 # the files in the dirstate to check that they contain the same files.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 from mercurial import (
11 from mercurial import (
12 dirstate,
12 dirstate,
13 extensions,
13 extensions,
14 pycompat,
14 pycompat,
15 )
15 )
16
16
17
17
def nonnormalentries(dmap):
    """Return the set of files in *dmap* whose entry is not "normal".

    An entry counts as non-normal when its state is anything other than
    b'n', or when its mtime carries the "unset" marker (-1).
    """
    return {
        fname
        for fname, entry in dmap.iteritems()
        if entry.state != b'n' or entry.mtime == -1
    }
25
25
26
26
def checkconsistency(ui, orig, dmap, _nonnormalset, label):
    """Recompute the nonnormal set from *dmap* and compare with *_nonnormalset*.

    When the two sets disagree, emit devel warnings (gated on the
    b'dirstate' config knob) describing the wrapped call and both
    versions of the set.
    """
    computed = nonnormalentries(dmap)
    if _nonnormalset == computed:
        return

    def _warn(msg):
        # All warnings share the same devel-config gate.
        ui.develwarn(msg, config=b'dirstate')

    _warn(b"%s call to %s\n" % (label, pycompat.sysbytes(repr(orig))))
    _warn(b"inconsistency in nonnormalset\n")
    _warn(b"[nonnormalset] %s\n" % pycompat.sysbytes(repr(_nonnormalset)))
    _warn(b"[map] %s\n" % pycompat.sysbytes(repr(computed)))
38
38
39
39
def _checkdirstate(orig, self, *args, **kwargs):
    """Wrapper validating nonnormalset consistency around a dirstate call.

    The consistency check runs both before and after delegating to the
    wrapped method; the wrapped method's return value is passed through
    unchanged.
    """
    checkconsistency(self._ui, orig, self._map, self._map.nonnormalset, b"before")
    result = orig(self, *args, **kwargs)
    checkconsistency(self._ui, orig, self._map, self._map.nonnormalset, b"after")
    return result
50
50
51
51
def extsetup(ui):
    """Wrap functions modifying dirstate to check nonnormalset consistency"""
    dirstatecl = dirstate.dirstate
    devel = ui.configbool(b'devel', b'all-warnings')
    paranoid = ui.configbool(b'experimental', b'nonnormalparanoidcheck')
    if not devel:
        return
    extensions.wrapfunction(dirstatecl, '_writedirstate', _checkdirstate)
    if not paranoid:
        # We skip the per-mutation checks when paranoid mode is disabled,
        # as they would make the extension run very slowly on large repos.
        return
    for name in (
        'write',
        'drop',
        'set_tracked',
        'set_untracked',
        'set_possibly_dirty',
        'update_file_p1',
        'update_file',
    ):
        extensions.wrapfunction(dirstatecl, name, _checkdirstate)
@@ -1,438 +1,437 b''
1 # sparse.py - allow sparse checkouts of the working directory
1 # sparse.py - allow sparse checkouts of the working directory
2 #
2 #
3 # Copyright 2014 Facebook, Inc.
3 # Copyright 2014 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """allow sparse checkouts of the working directory (EXPERIMENTAL)
8 """allow sparse checkouts of the working directory (EXPERIMENTAL)
9
9
10 (This extension is not yet protected by backwards compatibility
10 (This extension is not yet protected by backwards compatibility
11 guarantees. Any aspect may break in future releases until this
11 guarantees. Any aspect may break in future releases until this
12 notice is removed.)
12 notice is removed.)
13
13
14 This extension allows the working directory to only consist of a
14 This extension allows the working directory to only consist of a
15 subset of files for the revision. This allows specific files or
15 subset of files for the revision. This allows specific files or
16 directories to be explicitly included or excluded. Many repository
16 directories to be explicitly included or excluded. Many repository
17 operations have performance proportional to the number of files in
17 operations have performance proportional to the number of files in
18 the working directory. So only realizing a subset of files in the
18 the working directory. So only realizing a subset of files in the
19 working directory can improve performance.
19 working directory can improve performance.
20
20
21 Sparse Config Files
21 Sparse Config Files
22 -------------------
22 -------------------
23
23
24 The set of files that are part of a sparse checkout are defined by
24 The set of files that are part of a sparse checkout are defined by
25 a sparse config file. The file defines 3 things: includes (files to
25 a sparse config file. The file defines 3 things: includes (files to
26 include in the sparse checkout), excludes (files to exclude from the
26 include in the sparse checkout), excludes (files to exclude from the
27 sparse checkout), and profiles (links to other config files).
27 sparse checkout), and profiles (links to other config files).
28
28
29 The file format is newline delimited. Empty lines and lines beginning
29 The file format is newline delimited. Empty lines and lines beginning
30 with ``#`` are ignored.
30 with ``#`` are ignored.
31
31
32 Lines beginning with ``%include `` denote another sparse config file
32 Lines beginning with ``%include `` denote another sparse config file
33 to include. e.g. ``%include tests.sparse``. The filename is relative
33 to include. e.g. ``%include tests.sparse``. The filename is relative
34 to the repository root.
34 to the repository root.
35
35
36 The special lines ``[include]`` and ``[exclude]`` denote the section
36 The special lines ``[include]`` and ``[exclude]`` denote the section
37 for includes and excludes that follow, respectively. It is illegal to
37 for includes and excludes that follow, respectively. It is illegal to
38 have ``[include]`` after ``[exclude]``.
38 have ``[include]`` after ``[exclude]``.
39
39
40 Non-special lines resemble file patterns to be added to either includes
40 Non-special lines resemble file patterns to be added to either includes
41 or excludes. The syntax of these lines is documented by :hg:`help patterns`.
41 or excludes. The syntax of these lines is documented by :hg:`help patterns`.
42 Patterns are interpreted as ``glob:`` by default and match against the
42 Patterns are interpreted as ``glob:`` by default and match against the
43 root of the repository.
43 root of the repository.
44
44
45 Exclusion patterns take precedence over inclusion patterns. So even
45 Exclusion patterns take precedence over inclusion patterns. So even
46 if a file is explicitly included, an ``[exclude]`` entry can remove it.
46 if a file is explicitly included, an ``[exclude]`` entry can remove it.
47
47
48 For example, say you have a repository with 3 directories, ``frontend/``,
48 For example, say you have a repository with 3 directories, ``frontend/``,
49 ``backend/``, and ``tools/``. ``frontend/`` and ``backend/`` correspond
49 ``backend/``, and ``tools/``. ``frontend/`` and ``backend/`` correspond
50 to different projects and it is uncommon for someone working on one
50 to different projects and it is uncommon for someone working on one
51 to need the files for the other. But ``tools/`` contains files shared
51 to need the files for the other. But ``tools/`` contains files shared
52 between both projects. Your sparse config files may resemble::
52 between both projects. Your sparse config files may resemble::
53
53
54 # frontend.sparse
54 # frontend.sparse
55 frontend/**
55 frontend/**
56 tools/**
56 tools/**
57
57
58 # backend.sparse
58 # backend.sparse
59 backend/**
59 backend/**
60 tools/**
60 tools/**
61
61
62 Say the backend grows in size. Or there's a directory with thousands
62 Say the backend grows in size. Or there's a directory with thousands
63 of files you wish to exclude. You can modify the profile to exclude
63 of files you wish to exclude. You can modify the profile to exclude
64 certain files::
64 certain files::
65
65
66 [include]
66 [include]
67 backend/**
67 backend/**
68 tools/**
68 tools/**
69
69
70 [exclude]
70 [exclude]
71 tools/tests/**
71 tools/tests/**
72 """
72 """
73
73
74 from __future__ import absolute_import
74 from __future__ import absolute_import
75
75
76 from mercurial.i18n import _
76 from mercurial.i18n import _
77 from mercurial.pycompat import setattr
77 from mercurial.pycompat import setattr
78 from mercurial import (
78 from mercurial import (
79 commands,
79 commands,
80 dirstate,
80 dirstate,
81 error,
81 error,
82 extensions,
82 extensions,
83 logcmdutil,
83 logcmdutil,
84 match as matchmod,
84 match as matchmod,
85 merge as mergemod,
85 merge as mergemod,
86 pycompat,
86 pycompat,
87 registrar,
87 registrar,
88 sparse,
88 sparse,
89 util,
89 util,
90 )
90 )
91
91
92 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
92 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
93 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
93 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
94 # be specifying the version(s) of Mercurial they are tested with, or
94 # be specifying the version(s) of Mercurial they are tested with, or
95 # leave the attribute unspecified.
95 # leave the attribute unspecified.
96 testedwith = b'ships-with-hg-core'
96 testedwith = b'ships-with-hg-core'
97
97
98 cmdtable = {}
98 cmdtable = {}
99 command = registrar.command(cmdtable)
99 command = registrar.command(cmdtable)
100
100
101
101
def extsetup(ui):
    """Turn on core sparse support and install this extension's hooks."""
    sparse.enabled = True
    for hook in (_setupclone, _setuplog, _setupadd, _setupdirstate):
        hook(ui)
109
109
110
110
def replacefilecache(cls, propname, replacement):
    """Replace the filecache property *propname* on *cls* or an ancestor.

    Walks up the primary base-class chain until a class owning *propname*
    in its own ``__dict__`` is found, then installs ``replacement(orig)``
    in its place.  This allows changing the cache invalidation condition.

    Raises AttributeError when no class in the chain defines the property.
    """
    assert callable(replacement)
    klass = cls
    while klass is not object:
        if propname in klass.__dict__:
            setattr(klass, propname, replacement(klass.__dict__[propname]))
            return
        # Only the first base is followed, matching the original lookup.
        klass = klass.__bases__[0]
    raise AttributeError(
        _(b"type '%s' has no property '%s'") % (cls, propname)
    )
127
127
128
128
def _setuplog(ui):
    """Add a --sparse flag to `hg log` limiting it to the sparse checkout."""
    flag = (
        b'',
        b'sparse',
        None,
        b"limit to changesets affecting the sparse checkout",
    )
    commands.table[b'log|history'][1].append(flag)

    def _initialrevs(orig, repo, wopts):
        # Start from the stock revision set, then narrow it when the user
        # asked for --sparse.
        revs = orig(repo, wopts)
        if not wopts.opts.get(b'sparse'):
            return revs
        sparsematch = sparse.matcher(repo)

        def touches_sparse(rev):
            return any(f for f in repo[rev].files() if sparsematch(f))

        return revs.filter(touches_sparse)

    extensions.wrapfunction(logcmdutil, b'_initialrevs', _initialrevs)
153
153
154
154
def _clonesparsecmd(orig, ui, repo, *args, **opts):
    """Wrapper for `hg clone` honoring --include/--exclude/--enable-profile.

    When exactly one sparse flag is given (and --narrow is not), wrap
    ``merge.update`` so the sparse config is written just before the
    initial checkout populates the working directory.
    """
    pat = None
    include = exclude = enableprofile = False
    if opts.get('include'):
        pat = opts.get('include')
        include = True
    if opts.get('exclude'):
        pat = opts.get('exclude')
        exclude = True
    if opts.get('enable_profile'):
        pat = opts.get('enable_profile')
        enableprofile = True
    if include + exclude + enableprofile > 1:
        raise error.Abort(_(b"too many flags specified."))
    # if --narrow is passed, the patterns are includes and excludes for a
    # narrow clone, not sparse ones, so leave them alone in that case.
    if not opts.get('narrow') and (include or exclude or enableprofile):

        def clonesparse(orig, ctx, *args, **kwargs):
            sparse.updateconfig(
                ctx.repo().unfiltered(),
                pat,
                {},
                include=include,
                exclude=exclude,
                enableprofile=enableprofile,
                usereporootpaths=True,
            )
            return orig(ctx, *args, **kwargs)

        extensions.wrapfunction(mergemod, b'update', clonesparse)
    return orig(ui, repo, *args, **opts)
190
190
191
191
def _setupclone(ui):
    """Register the sparse flags on `hg clone` and wrap the command."""
    cloneopts = commands.table[b'clone'][1]
    cloneopts.extend(
        [
            (b'', b'enable-profile', [], b'enable a sparse profile'),
            (b'', b'include', [], b'include sparse pattern'),
            (b'', b'exclude', [], b'exclude sparse pattern'),
        ]
    )
    extensions.wrapcommand(commands.table, b'clone', _clonesparsecmd)
198
198
199
199
def _setupadd(ui):
    """Teach `hg add` a -s/--sparse flag that widens the sparse config."""
    commands.table[b'add'][1].append(
        (
            b's',
            b'sparse',
            None,
            b'also include directories of added files in sparse config',
        )
    )

    def _add(orig, ui, repo, *pats, **opts):
        if opts.get('sparse'):
            # Include the parent directory of every pattern being added so
            # the new files fall inside the sparse checkout.
            dirs = {util.split(pat)[0] for pat in pats}
            sparse.updateconfig(repo, list(dirs), opts, include=True)
        return orig(ui, repo, *pats, **opts)

    extensions.wrapcommand(commands.table, b'add', _add)
221
221
222
222
def _setupdirstate(ui):
    """Modify the dirstate to prevent stat'ing excluded files,
    and to prevent modifications to files outside the checkout.
    """

    def walk(orig, self, match, subrepos, unknown, ignored, full=True):
        # hack to not exclude explicitly-specified paths so that they can
        # be warned later on e.g. dirstate.add()
        em = matchmod.exact(match.files())
        sm = matchmod.unionmatcher([self._sparsematcher, em])
        match = matchmod.intersectmatchers(match, sm)
        return orig(self, match, subrepos, unknown, ignored, full)

    extensions.wrapfunction(dirstate.dirstate, b'walk', walk)

    # dirstate.rebuild should not add non-matching files
    def _rebuild(orig, self, parent, allfiles, changedfiles=None):
        # When the sparse matcher matches everything there is nothing to
        # filter, so delegate untouched.
        matcher = self._sparsematcher
        if not matcher.always():
            allfiles = [f for f in allfiles if matcher(f)]
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]

            if changedfiles is not None:
                # In _rebuild, these files will be deleted from the dirstate
                # when they are not found to be in allfiles
                dirstatefilestoremove = {f for f in self if not matcher(f)}
                changedfiles = dirstatefilestoremove.union(changedfiles)

        return orig(self, parent, allfiles, changedfiles)

    extensions.wrapfunction(dirstate.dirstate, b'rebuild', _rebuild)

    # Prevent adding files that are outside the sparse checkout
    editfuncs = [
        b'set_tracked',
        b'set_untracked',
        b'copy',
    ]
    hint = _(
        b'include file with `hg debugsparse --include <pattern>` or use '
        + b'`hg add -s <file>` to include file directory while adding'
    )
    for func in editfuncs:
        # NOTE: _wrapper does not close over `func`, so defining it inside
        # the loop is safe (each iteration wraps an identical function).

        def _wrapper(orig, self, *args, **kwargs):
            sparsematch = self._sparsematcher
            if not sparsematch.always():
                # positional args are file names for these dirstate methods;
                # `None` is skipped — presumably for optional path arguments
                # such as copy()'s — TODO confirm against the callers.
                for f in args:
                    if f is not None and not sparsematch(f) and f not in self:
                        raise error.Abort(
                            _(
                                b"cannot add '%s' - it is outside "
                                b"the sparse checkout"
                            )
                            % f,
                            hint=hint,
                        )
            return orig(self, *args, **kwargs)

        extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
285
284
286
285
@command(
    b'debugsparse',
    [
        (b'I', b'include', False, _(b'include files in the sparse checkout')),
        (b'X', b'exclude', False, _(b'exclude files in the sparse checkout')),
        (b'd', b'delete', False, _(b'delete an include/exclude rule')),
        (
            b'f',
            b'force',
            False,
            _(b'allow changing rules even with pending changes'),
        ),
        (b'', b'enable-profile', False, _(b'enables the specified profile')),
        (b'', b'disable-profile', False, _(b'disables the specified profile')),
        (b'', b'import-rules', False, _(b'imports rules from a file')),
        (b'', b'clear-rules', False, _(b'clears local include/exclude rules')),
        (
            b'',
            b'refresh',
            False,
            _(b'updates the working after sparseness changes'),
        ),
        (b'', b'reset', False, _(b'makes the repo full again')),
    ]
    + commands.templateopts,
    _(b'[--OPTION] PATTERN...'),
    helpbasic=True,
)
def debugsparse(ui, repo, *pats, **opts):
    """make the current checkout sparse, or edit the existing checkout

    The sparse command is used to make the current checkout sparse.
    This means files that don't meet the sparse condition will not be
    written to disk, or show up in any working copy operations. It does
    not affect files in history in any way.

    Passing no arguments prints the currently applied sparse rules.

    --include and --exclude are used to add and remove files from the sparse
    checkout. The effects of adding an include or exclude rule are applied
    immediately. If applying the new rule would cause a file with pending
    changes to be added or removed, the command will fail. Pass --force to
    force a rule change even with pending changes (the changes on disk will
    be preserved).

    --delete removes an existing include/exclude rule. The effects are
    immediate.

    --refresh refreshes the files on disk based on the sparse rules. This is
    only necessary if .hg/sparse was changed by hand.

    --enable-profile and --disable-profile accept a path to a .hgsparse file.
    This allows defining sparse checkouts and tracking them inside the
    repository. This is useful for defining commonly used sparse checkouts for
    many people to use. As the profile definition changes over time, the sparse
    checkout will automatically be updated appropriately, depending on which
    changeset is checked out. Changes to .hgsparse are not applied until they
    have been committed.

    --import-rules accepts a path to a file containing rules in the .hgsparse
    format, allowing you to add --include, --exclude and --enable-profile rules
    in bulk. Like the --include, --exclude and --enable-profile switches, the
    changes are applied immediately.

    --clear-rules removes all local include and exclude rules, while leaving
    any enabled profiles in place.

    Returns 0 if editing the sparse checkout succeeds.
    """
    opts = pycompat.byteskwargs(opts)
    include = opts.get(b'include')
    exclude = opts.get(b'exclude')
    force = opts.get(b'force')
    enableprofile = opts.get(b'enable_profile')
    disableprofile = opts.get(b'disable_profile')
    importrules = opts.get(b'import_rules')
    clearrules = opts.get(b'clear_rules')
    delete = opts.get(b'delete')
    refresh = opts.get(b'refresh')
    reset = opts.get(b'reset')
    # Only one primary action may be requested per invocation.
    count = sum(
        [
            include,
            exclude,
            enableprofile,
            disableprofile,
            delete,
            importrules,
            refresh,
            clearrules,
            reset,
        ]
    )
    if count > 1:
        raise error.Abort(_(b"too many flags specified"))

    if count == 0:
        # No action: report the current rules (plus temporary includes).
        if repo.vfs.exists(b'sparse'):
            ui.status(repo.vfs.read(b"sparse") + b"\n")
            temporaryincludes = sparse.readtemporaryincludes(repo)
            if temporaryincludes:
                ui.status(
                    _(b"Temporarily Included Files (for merge/rebase):\n")
                )
                ui.status((b"\n".join(temporaryincludes) + b"\n"))
            return
        else:
            raise error.Abort(
                _(
                    b'the debugsparse command is only supported on'
                    b' sparse repositories'
                )
            )

    if include or exclude or delete or reset or enableprofile or disableprofile:
        sparse.updateconfig(
            repo,
            pats,
            opts,
            include=include,
            exclude=exclude,
            reset=reset,
            delete=delete,
            enableprofile=enableprofile,
            disableprofile=disableprofile,
            force=force,
        )

    if importrules:
        sparse.importfromfiles(repo, opts, pats, force=force)

    if clearrules:
        sparse.clearrules(repo, force=force)

    if refresh:
        # Acquire the lock *before* entering the try block: previously the
        # acquisition sat inside the try, so a failed wlock() would make the
        # finally clause raise UnboundLocalError and mask the real error.
        wlock = repo.wlock()
        try:
            # map() is lazy on Python 3; materialize so the three counts
            # (added, dropped, conflicting) can be indexed below.
            fcounts = list(
                map(
                    len,
                    sparse.refreshwdir(
                        repo, repo.status(), sparse.matcher(repo), force=force
                    ),
                )
            )
            sparse.printchanges(
                ui,
                opts,
                added=fcounts[0],
                dropped=fcounts[1],
                conflicting=fcounts[2],
            )
        finally:
            wlock.release()
@@ -1,1678 +1,1660 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .pycompat import delattr
17 from .pycompat import delattr
18
18
19 from hgdemandimport import tracing
19 from hgdemandimport import tracing
20
20
21 from . import (
21 from . import (
22 dirstatemap,
22 dirstatemap,
23 encoding,
23 encoding,
24 error,
24 error,
25 match as matchmod,
25 match as matchmod,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sparse,
30 sparse,
31 util,
31 util,
32 )
32 )
33
33
34 from .interfaces import (
34 from .interfaces import (
35 dirstate as intdirstate,
35 dirstate as intdirstate,
36 util as interfaceutil,
36 util as interfaceutil,
37 )
37 )
38
38
39 parsers = policy.importmod('parsers')
39 parsers = policy.importmod('parsers')
40 rustmod = policy.importrust('dirstate')
40 rustmod = policy.importrust('dirstate')
41
41
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
42 SUPPORTS_DIRSTATE_V2 = rustmod is not None
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45 filecache = scmutil.filecache
45 filecache = scmutil.filecache
46 _rangemask = dirstatemap.rangemask
46 _rangemask = dirstatemap.rangemask
47
47
48 DirstateItem = parsers.DirstateItem
48 DirstateItem = parsers.DirstateItem
49
49
50
50
class repocache(filecache):
    """A filecache whose entries live under the repository's .hg/ directory."""

    def join(self, obj, fname):
        # Resolve fname through the dirstate's opener, which is rooted at .hg/.
        return obj._opener.join(fname)
56
56
57
57
class rootcache(filecache):
    """A filecache whose entries live in the working-directory root."""

    def join(self, obj, fname):
        # Delegate to the dirstate's fast root-relative join helper.
        return obj._join(fname)
63
63
64
64
def _getfsnow(vfs):
    """Return the filesystem's idea of "now" as an mtime.

    A temporary file is created through *vfs* and its stat mtime read, so
    the result reflects the timestamp granularity of the underlying
    filesystem rather than the system clock.
    """
    fd, name = vfs.mkstemp()
    try:
        return os.fstat(fd)[stat.ST_MTIME]
    finally:
        # Always clean up the probe file, even if fstat raised.
        os.close(fd)
        vfs.unlink(name)
73
73
74
74
def requires_parents_change(func):
    """Decorator enforcing that *func* runs inside a parentchange context."""

    def wrap(self, *args, **kwargs):
        if not self.pendingparentchange():
            msg = 'calling `%s` outside of a parentchange context' % func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return wrap
84
84
85
85
def requires_no_parents_change(func):
    """Decorator enforcing that *func* runs outside a parentchange context."""

    def wrap(self, *args, **kwargs):
        if self.pendingparentchange():
            msg = 'calling `%s` inside of a parentchange context' % func.__name__
            raise error.ProgrammingError(msg)
        return func(self, *args, **kwargs)

    return wrap
95
95
96
96
97 @interfaceutil.implementer(intdirstate.idirstate)
97 @interfaceutil.implementer(intdirstate.idirstate)
98 class dirstate(object):
98 class dirstate(object):
99 def __init__(
99 def __init__(
100 self,
100 self,
101 opener,
101 opener,
102 ui,
102 ui,
103 root,
103 root,
104 validate,
104 validate,
105 sparsematchfn,
105 sparsematchfn,
106 nodeconstants,
106 nodeconstants,
107 use_dirstate_v2,
107 use_dirstate_v2,
108 ):
108 ):
109 """Create a new dirstate object.
109 """Create a new dirstate object.
110
110
111 opener is an open()-like callable that can be used to open the
111 opener is an open()-like callable that can be used to open the
112 dirstate file; root is the root of the directory tracked by
112 dirstate file; root is the root of the directory tracked by
113 the dirstate.
113 the dirstate.
114 """
114 """
115 self._use_dirstate_v2 = use_dirstate_v2
115 self._use_dirstate_v2 = use_dirstate_v2
116 self._nodeconstants = nodeconstants
116 self._nodeconstants = nodeconstants
117 self._opener = opener
117 self._opener = opener
118 self._validate = validate
118 self._validate = validate
119 self._root = root
119 self._root = root
120 self._sparsematchfn = sparsematchfn
120 self._sparsematchfn = sparsematchfn
121 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
121 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
122 # UNC path pointing to root share (issue4557)
122 # UNC path pointing to root share (issue4557)
123 self._rootdir = pathutil.normasprefix(root)
123 self._rootdir = pathutil.normasprefix(root)
124 self._dirty = False
124 self._dirty = False
125 self._lastnormaltime = 0
125 self._lastnormaltime = 0
126 self._ui = ui
126 self._ui = ui
127 self._filecache = {}
127 self._filecache = {}
128 self._parentwriters = 0
128 self._parentwriters = 0
129 self._filename = b'dirstate'
129 self._filename = b'dirstate'
130 self._pendingfilename = b'%s.pending' % self._filename
130 self._pendingfilename = b'%s.pending' % self._filename
131 self._plchangecallbacks = {}
131 self._plchangecallbacks = {}
132 self._origpl = None
132 self._origpl = None
133 self._updatedfiles = set()
133 self._updatedfiles = set()
134 self._mapcls = dirstatemap.dirstatemap
134 self._mapcls = dirstatemap.dirstatemap
135 # Access and cache cwd early, so we don't access it for the first time
135 # Access and cache cwd early, so we don't access it for the first time
136 # after a working-copy update caused it to not exist (accessing it then
136 # after a working-copy update caused it to not exist (accessing it then
137 # raises an exception).
137 # raises an exception).
138 self._cwd
138 self._cwd
139
139
140 def prefetch_parents(self):
140 def prefetch_parents(self):
141 """make sure the parents are loaded
141 """make sure the parents are loaded
142
142
143 Used to avoid a race condition.
143 Used to avoid a race condition.
144 """
144 """
145 self._pl
145 self._pl
146
146
147 @contextlib.contextmanager
147 @contextlib.contextmanager
148 def parentchange(self):
148 def parentchange(self):
149 """Context manager for handling dirstate parents.
149 """Context manager for handling dirstate parents.
150
150
151 If an exception occurs in the scope of the context manager,
151 If an exception occurs in the scope of the context manager,
152 the incoherent dirstate won't be written when wlock is
152 the incoherent dirstate won't be written when wlock is
153 released.
153 released.
154 """
154 """
155 self._parentwriters += 1
155 self._parentwriters += 1
156 yield
156 yield
157 # Typically we want the "undo" step of a context manager in a
157 # Typically we want the "undo" step of a context manager in a
158 # finally block so it happens even when an exception
158 # finally block so it happens even when an exception
159 # occurs. In this case, however, we only want to decrement
159 # occurs. In this case, however, we only want to decrement
160 # parentwriters if the code in the with statement exits
160 # parentwriters if the code in the with statement exits
161 # normally, so we don't have a try/finally here on purpose.
161 # normally, so we don't have a try/finally here on purpose.
162 self._parentwriters -= 1
162 self._parentwriters -= 1
163
163
164 def pendingparentchange(self):
164 def pendingparentchange(self):
165 """Returns true if the dirstate is in the middle of a set of changes
165 """Returns true if the dirstate is in the middle of a set of changes
166 that modify the dirstate parent.
166 that modify the dirstate parent.
167 """
167 """
168 return self._parentwriters > 0
168 return self._parentwriters > 0
169
169
170 @propertycache
170 @propertycache
171 def _map(self):
171 def _map(self):
172 """Return the dirstate contents (see documentation for dirstatemap)."""
172 """Return the dirstate contents (see documentation for dirstatemap)."""
173 self._map = self._mapcls(
173 self._map = self._mapcls(
174 self._ui,
174 self._ui,
175 self._opener,
175 self._opener,
176 self._root,
176 self._root,
177 self._nodeconstants,
177 self._nodeconstants,
178 self._use_dirstate_v2,
178 self._use_dirstate_v2,
179 )
179 )
180 return self._map
180 return self._map
181
181
182 @property
182 @property
183 def _sparsematcher(self):
183 def _sparsematcher(self):
184 """The matcher for the sparse checkout.
184 """The matcher for the sparse checkout.
185
185
186 The working directory may not include every file from a manifest. The
186 The working directory may not include every file from a manifest. The
187 matcher obtained by this property will match a path if it is to be
187 matcher obtained by this property will match a path if it is to be
188 included in the working directory.
188 included in the working directory.
189 """
189 """
190 # TODO there is potential to cache this property. For now, the matcher
190 # TODO there is potential to cache this property. For now, the matcher
191 # is resolved on every access. (But the called function does use a
191 # is resolved on every access. (But the called function does use a
192 # cache to keep the lookup fast.)
192 # cache to keep the lookup fast.)
193 return self._sparsematchfn()
193 return self._sparsematchfn()
194
194
195 @repocache(b'branch')
195 @repocache(b'branch')
196 def _branch(self):
196 def _branch(self):
197 try:
197 try:
198 return self._opener.read(b"branch").strip() or b"default"
198 return self._opener.read(b"branch").strip() or b"default"
199 except IOError as inst:
199 except IOError as inst:
200 if inst.errno != errno.ENOENT:
200 if inst.errno != errno.ENOENT:
201 raise
201 raise
202 return b"default"
202 return b"default"
203
203
204 @property
204 @property
205 def _pl(self):
205 def _pl(self):
206 return self._map.parents()
206 return self._map.parents()
207
207
208 def hasdir(self, d):
208 def hasdir(self, d):
209 return self._map.hastrackeddir(d)
209 return self._map.hastrackeddir(d)
210
210
211 @rootcache(b'.hgignore')
211 @rootcache(b'.hgignore')
212 def _ignore(self):
212 def _ignore(self):
213 files = self._ignorefiles()
213 files = self._ignorefiles()
214 if not files:
214 if not files:
215 return matchmod.never()
215 return matchmod.never()
216
216
217 pats = [b'include:%s' % f for f in files]
217 pats = [b'include:%s' % f for f in files]
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
219
219
220 @propertycache
220 @propertycache
221 def _slash(self):
221 def _slash(self):
222 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
222 return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
223
223
224 @propertycache
224 @propertycache
225 def _checklink(self):
225 def _checklink(self):
226 return util.checklink(self._root)
226 return util.checklink(self._root)
227
227
228 @propertycache
228 @propertycache
229 def _checkexec(self):
229 def _checkexec(self):
230 return bool(util.checkexec(self._root))
230 return bool(util.checkexec(self._root))
231
231
232 @propertycache
232 @propertycache
233 def _checkcase(self):
233 def _checkcase(self):
234 return not util.fscasesensitive(self._join(b'.hg'))
234 return not util.fscasesensitive(self._join(b'.hg'))
235
235
236 def _join(self, f):
236 def _join(self, f):
237 # much faster than os.path.join()
237 # much faster than os.path.join()
238 # it's safe because f is always a relative path
238 # it's safe because f is always a relative path
239 return self._rootdir + f
239 return self._rootdir + f
240
240
241 def flagfunc(self, buildfallback):
241 def flagfunc(self, buildfallback):
242 if self._checklink and self._checkexec:
242 if self._checklink and self._checkexec:
243
243
244 def f(x):
244 def f(x):
245 try:
245 try:
246 st = os.lstat(self._join(x))
246 st = os.lstat(self._join(x))
247 if util.statislink(st):
247 if util.statislink(st):
248 return b'l'
248 return b'l'
249 if util.statisexec(st):
249 if util.statisexec(st):
250 return b'x'
250 return b'x'
251 except OSError:
251 except OSError:
252 pass
252 pass
253 return b''
253 return b''
254
254
255 return f
255 return f
256
256
257 fallback = buildfallback()
257 fallback = buildfallback()
258 if self._checklink:
258 if self._checklink:
259
259
260 def f(x):
260 def f(x):
261 if os.path.islink(self._join(x)):
261 if os.path.islink(self._join(x)):
262 return b'l'
262 return b'l'
263 if b'x' in fallback(x):
263 if b'x' in fallback(x):
264 return b'x'
264 return b'x'
265 return b''
265 return b''
266
266
267 return f
267 return f
268 if self._checkexec:
268 if self._checkexec:
269
269
270 def f(x):
270 def f(x):
271 if b'l' in fallback(x):
271 if b'l' in fallback(x):
272 return b'l'
272 return b'l'
273 if util.isexec(self._join(x)):
273 if util.isexec(self._join(x)):
274 return b'x'
274 return b'x'
275 return b''
275 return b''
276
276
277 return f
277 return f
278 else:
278 else:
279 return fallback
279 return fallback
280
280
281 @propertycache
281 @propertycache
282 def _cwd(self):
282 def _cwd(self):
283 # internal config: ui.forcecwd
283 # internal config: ui.forcecwd
284 forcecwd = self._ui.config(b'ui', b'forcecwd')
284 forcecwd = self._ui.config(b'ui', b'forcecwd')
285 if forcecwd:
285 if forcecwd:
286 return forcecwd
286 return forcecwd
287 return encoding.getcwd()
287 return encoding.getcwd()
288
288
289 def getcwd(self):
289 def getcwd(self):
290 """Return the path from which a canonical path is calculated.
290 """Return the path from which a canonical path is calculated.
291
291
292 This path should be used to resolve file patterns or to convert
292 This path should be used to resolve file patterns or to convert
293 canonical paths back to file paths for display. It shouldn't be
293 canonical paths back to file paths for display. It shouldn't be
294 used to get real file paths. Use vfs functions instead.
294 used to get real file paths. Use vfs functions instead.
295 """
295 """
296 cwd = self._cwd
296 cwd = self._cwd
297 if cwd == self._root:
297 if cwd == self._root:
298 return b''
298 return b''
299 # self._root ends with a path separator if self._root is '/' or 'C:\'
299 # self._root ends with a path separator if self._root is '/' or 'C:\'
300 rootsep = self._root
300 rootsep = self._root
301 if not util.endswithsep(rootsep):
301 if not util.endswithsep(rootsep):
302 rootsep += pycompat.ossep
302 rootsep += pycompat.ossep
303 if cwd.startswith(rootsep):
303 if cwd.startswith(rootsep):
304 return cwd[len(rootsep) :]
304 return cwd[len(rootsep) :]
305 else:
305 else:
306 # we're outside the repo. return an absolute path.
306 # we're outside the repo. return an absolute path.
307 return cwd
307 return cwd
308
308
309 def pathto(self, f, cwd=None):
309 def pathto(self, f, cwd=None):
310 if cwd is None:
310 if cwd is None:
311 cwd = self.getcwd()
311 cwd = self.getcwd()
312 path = util.pathto(self._root, cwd, f)
312 path = util.pathto(self._root, cwd, f)
313 if self._slash:
313 if self._slash:
314 return util.pconvert(path)
314 return util.pconvert(path)
315 return path
315 return path
316
316
317 def __getitem__(self, key):
317 def __getitem__(self, key):
318 """Return the current state of key (a filename) in the dirstate.
318 """Return the current state of key (a filename) in the dirstate.
319
319
320 States are:
320 States are:
321 n normal
321 n normal
322 m needs merging
322 m needs merging
323 r marked for removal
323 r marked for removal
324 a marked for addition
324 a marked for addition
325 ? not tracked
325 ? not tracked
326
326
327 XXX The "state" is a bit obscure to be in the "public" API. we should
327 XXX The "state" is a bit obscure to be in the "public" API. we should
328 consider migrating all user of this to going through the dirstate entry
328 consider migrating all user of this to going through the dirstate entry
329 instead.
329 instead.
330 """
330 """
331 entry = self._map.get(key)
331 entry = self._map.get(key)
332 if entry is not None:
332 if entry is not None:
333 return entry.state
333 return entry.state
334 return b'?'
334 return b'?'
335
335
336 def __contains__(self, key):
336 def __contains__(self, key):
337 return key in self._map
337 return key in self._map
338
338
339 def __iter__(self):
339 def __iter__(self):
340 return iter(sorted(self._map))
340 return iter(sorted(self._map))
341
341
342 def items(self):
342 def items(self):
343 return pycompat.iteritems(self._map)
343 return pycompat.iteritems(self._map)
344
344
345 iteritems = items
345 iteritems = items
346
346
347 def directories(self):
347 def directories(self):
348 return self._map.directories()
348 return self._map.directories()
349
349
350 def parents(self):
350 def parents(self):
351 return [self._validate(p) for p in self._pl]
351 return [self._validate(p) for p in self._pl]
352
352
353 def p1(self):
353 def p1(self):
354 return self._validate(self._pl[0])
354 return self._validate(self._pl[0])
355
355
356 def p2(self):
356 def p2(self):
357 return self._validate(self._pl[1])
357 return self._validate(self._pl[1])
358
358
359 @property
359 @property
360 def in_merge(self):
360 def in_merge(self):
361 """True if a merge is in progress"""
361 """True if a merge is in progress"""
362 return self._pl[1] != self._nodeconstants.nullid
362 return self._pl[1] != self._nodeconstants.nullid
363
363
364 def branch(self):
364 def branch(self):
365 return encoding.tolocal(self._branch)
365 return encoding.tolocal(self._branch)
366
366
367 def setparents(self, p1, p2=None):
367 def setparents(self, p1, p2=None):
368 """Set dirstate parents to p1 and p2.
368 """Set dirstate parents to p1 and p2.
369
369
370 When moving from two parents to one, "merged" entries a
370 When moving from two parents to one, "merged" entries a
371 adjusted to normal and previous copy records discarded and
371 adjusted to normal and previous copy records discarded and
372 returned by the call.
372 returned by the call.
373
373
374 See localrepo.setparents()
374 See localrepo.setparents()
375 """
375 """
376 if p2 is None:
376 if p2 is None:
377 p2 = self._nodeconstants.nullid
377 p2 = self._nodeconstants.nullid
378 if self._parentwriters == 0:
378 if self._parentwriters == 0:
379 raise ValueError(
379 raise ValueError(
380 b"cannot set dirstate parent outside of "
380 b"cannot set dirstate parent outside of "
381 b"dirstate.parentchange context manager"
381 b"dirstate.parentchange context manager"
382 )
382 )
383
383
384 self._dirty = True
384 self._dirty = True
385 oldp2 = self._pl[1]
385 oldp2 = self._pl[1]
386 if self._origpl is None:
386 if self._origpl is None:
387 self._origpl = self._pl
387 self._origpl = self._pl
388 self._map.setparents(p1, p2)
388 self._map.setparents(p1, p2)
389 copies = {}
389 copies = {}
390 if (
390 if (
391 oldp2 != self._nodeconstants.nullid
391 oldp2 != self._nodeconstants.nullid
392 and p2 == self._nodeconstants.nullid
392 and p2 == self._nodeconstants.nullid
393 ):
393 ):
394 candidatefiles = self._map.non_normal_or_other_parent_paths()
394 candidatefiles = self._map.non_normal_or_other_parent_paths()
395
395
396 for f in candidatefiles:
396 for f in candidatefiles:
397 s = self._map.get(f)
397 s = self._map.get(f)
398 if s is None:
398 if s is None:
399 continue
399 continue
400
400
401 # Discard "merged" markers when moving away from a merge state
401 # Discard "merged" markers when moving away from a merge state
402 if s.merged:
402 if s.merged:
403 source = self._map.copymap.get(f)
403 source = self._map.copymap.get(f)
404 if source:
404 if source:
405 copies[f] = source
405 copies[f] = source
406 self._normallookup(f)
406 self._normallookup(f)
407 # Also fix up otherparent markers
407 # Also fix up otherparent markers
408 elif s.from_p2:
408 elif s.from_p2:
409 source = self._map.copymap.get(f)
409 source = self._map.copymap.get(f)
410 if source:
410 if source:
411 copies[f] = source
411 copies[f] = source
412 self._add(f)
412 self._add(f)
413 return copies
413 return copies
414
414
415 def setbranch(self, branch):
415 def setbranch(self, branch):
416 self.__class__._branch.set(self, encoding.fromlocal(branch))
416 self.__class__._branch.set(self, encoding.fromlocal(branch))
417 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
417 f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
418 try:
418 try:
419 f.write(self._branch + b'\n')
419 f.write(self._branch + b'\n')
420 f.close()
420 f.close()
421
421
422 # make sure filecache has the correct stat info for _branch after
422 # make sure filecache has the correct stat info for _branch after
423 # replacing the underlying file
423 # replacing the underlying file
424 ce = self._filecache[b'_branch']
424 ce = self._filecache[b'_branch']
425 if ce:
425 if ce:
426 ce.refresh()
426 ce.refresh()
427 except: # re-raises
427 except: # re-raises
428 f.discard()
428 f.discard()
429 raise
429 raise
430
430
431 def invalidate(self):
431 def invalidate(self):
432 """Causes the next access to reread the dirstate.
432 """Causes the next access to reread the dirstate.
433
433
434 This is different from localrepo.invalidatedirstate() because it always
434 This is different from localrepo.invalidatedirstate() because it always
435 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
435 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
436 check whether the dirstate has changed before rereading it."""
436 check whether the dirstate has changed before rereading it."""
437
437
438 for a in ("_map", "_branch", "_ignore"):
438 for a in ("_map", "_branch", "_ignore"):
439 if a in self.__dict__:
439 if a in self.__dict__:
440 delattr(self, a)
440 delattr(self, a)
441 self._lastnormaltime = 0
441 self._lastnormaltime = 0
442 self._dirty = False
442 self._dirty = False
443 self._updatedfiles.clear()
443 self._updatedfiles.clear()
444 self._parentwriters = 0
444 self._parentwriters = 0
445 self._origpl = None
445 self._origpl = None
446
446
447 def copy(self, source, dest):
447 def copy(self, source, dest):
448 """Mark dest as a copy of source. Unmark dest if source is None."""
448 """Mark dest as a copy of source. Unmark dest if source is None."""
449 if source == dest:
449 if source == dest:
450 return
450 return
451 self._dirty = True
451 self._dirty = True
452 if source is not None:
452 if source is not None:
453 self._map.copymap[dest] = source
453 self._map.copymap[dest] = source
454 self._updatedfiles.add(source)
454 self._updatedfiles.add(source)
455 self._updatedfiles.add(dest)
455 self._updatedfiles.add(dest)
456 elif self._map.copymap.pop(dest, None):
456 elif self._map.copymap.pop(dest, None):
457 self._updatedfiles.add(dest)
457 self._updatedfiles.add(dest)
458
458
459 def copied(self, file):
459 def copied(self, file):
460 return self._map.copymap.get(file, None)
460 return self._map.copymap.get(file, None)
461
461
462 def copies(self):
462 def copies(self):
463 return self._map.copymap
463 return self._map.copymap
464
464
465 @requires_no_parents_change
465 @requires_no_parents_change
466 def set_tracked(self, filename):
466 def set_tracked(self, filename):
467 """a "public" method for generic code to mark a file as tracked
467 """a "public" method for generic code to mark a file as tracked
468
468
469 This function is to be called outside of "update/merge" case. For
469 This function is to be called outside of "update/merge" case. For
470 example by a command like `hg add X`.
470 example by a command like `hg add X`.
471
471
472 return True the file was previously untracked, False otherwise.
472 return True the file was previously untracked, False otherwise.
473 """
473 """
474 entry = self._map.get(filename)
474 entry = self._map.get(filename)
475 if entry is None:
475 if entry is None:
476 self._add(filename)
476 self._add(filename)
477 return True
477 return True
478 elif not entry.tracked:
478 elif not entry.tracked:
479 self._normallookup(filename)
479 self._normallookup(filename)
480 return True
480 return True
481 # XXX This is probably overkill for more case, but we need this to
481 # XXX This is probably overkill for more case, but we need this to
482 # fully replace the `normallookup` call with `set_tracked` one.
482 # fully replace the `normallookup` call with `set_tracked` one.
483 # Consider smoothing this in the future.
483 # Consider smoothing this in the future.
484 self.set_possibly_dirty(filename)
484 self.set_possibly_dirty(filename)
485 return False
485 return False
486
486
487 @requires_no_parents_change
487 @requires_no_parents_change
488 def set_untracked(self, filename):
488 def set_untracked(self, filename):
489 """a "public" method for generic code to mark a file as untracked
489 """a "public" method for generic code to mark a file as untracked
490
490
491 This function is to be called outside of "update/merge" case. For
491 This function is to be called outside of "update/merge" case. For
492 example by a command like `hg remove X`.
492 example by a command like `hg remove X`.
493
493
494 return True the file was previously tracked, False otherwise.
494 return True the file was previously tracked, False otherwise.
495 """
495 """
496 entry = self._map.get(filename)
496 entry = self._map.get(filename)
497 if entry is None:
497 if entry is None:
498 return False
498 return False
499 elif entry.added:
499 elif entry.added:
500 self._drop(filename)
500 self._drop(filename)
501 return True
501 return True
502 else:
502 else:
503 self._dirty = True
503 self._dirty = True
504 self._updatedfiles.add(filename)
504 self._updatedfiles.add(filename)
505 self._map.set_untracked(filename)
505 self._map.set_untracked(filename)
506 return True
506 return True
507
507
508 @requires_no_parents_change
508 @requires_no_parents_change
509 def set_clean(self, filename, parentfiledata=None):
509 def set_clean(self, filename, parentfiledata=None):
510 """record that the current state of the file on disk is known to be clean"""
510 """record that the current state of the file on disk is known to be clean"""
511 self._dirty = True
511 self._dirty = True
512 self._updatedfiles.add(filename)
512 self._updatedfiles.add(filename)
513 self._normal(filename, parentfiledata=parentfiledata)
513 self._normal(filename, parentfiledata=parentfiledata)
514
514
515 @requires_no_parents_change
515 @requires_no_parents_change
516 def set_possibly_dirty(self, filename):
516 def set_possibly_dirty(self, filename):
517 """record that the current state of the file on disk is unknown"""
517 """record that the current state of the file on disk is unknown"""
518 self._dirty = True
518 self._dirty = True
519 self._updatedfiles.add(filename)
519 self._updatedfiles.add(filename)
520 self._map.set_possibly_dirty(filename)
520 self._map.set_possibly_dirty(filename)
521
521
522 @requires_parents_change
522 @requires_parents_change
523 def update_file_p1(
523 def update_file_p1(
524 self,
524 self,
525 filename,
525 filename,
526 p1_tracked,
526 p1_tracked,
527 ):
527 ):
528 """Set a file as tracked in the parent (or not)
528 """Set a file as tracked in the parent (or not)
529
529
530 This is to be called when adjust the dirstate to a new parent after an history
530 This is to be called when adjust the dirstate to a new parent after an history
531 rewriting operation.
531 rewriting operation.
532
532
533 It should not be called during a merge (p2 != nullid) and only within
533 It should not be called during a merge (p2 != nullid) and only within
534 a `with dirstate.parentchange():` context.
534 a `with dirstate.parentchange():` context.
535 """
535 """
536 if self.in_merge:
536 if self.in_merge:
537 msg = b'update_file_reference should not be called when merging'
537 msg = b'update_file_reference should not be called when merging'
538 raise error.ProgrammingError(msg)
538 raise error.ProgrammingError(msg)
539 entry = self._map.get(filename)
539 entry = self._map.get(filename)
540 if entry is None:
540 if entry is None:
541 wc_tracked = False
541 wc_tracked = False
542 else:
542 else:
543 wc_tracked = entry.tracked
543 wc_tracked = entry.tracked
544 possibly_dirty = False
544 possibly_dirty = False
545 if p1_tracked and wc_tracked:
545 if p1_tracked and wc_tracked:
546 # the underlying reference might have changed, we will have to
546 # the underlying reference might have changed, we will have to
547 # check it.
547 # check it.
548 possibly_dirty = True
548 possibly_dirty = True
549 elif not (p1_tracked or wc_tracked):
549 elif not (p1_tracked or wc_tracked):
550 # the file is no longer relevant to anyone
550 # the file is no longer relevant to anyone
551 self._drop(filename)
551 self._drop(filename)
552 elif (not p1_tracked) and wc_tracked:
552 elif (not p1_tracked) and wc_tracked:
553 if entry is not None and entry.added:
553 if entry is not None and entry.added:
554 return # avoid dropping copy information (maybe?)
554 return # avoid dropping copy information (maybe?)
555 elif p1_tracked and not wc_tracked:
555 elif p1_tracked and not wc_tracked:
556 pass
556 pass
557 else:
557 else:
558 assert False, 'unreachable'
558 assert False, 'unreachable'
559
559
560 # this mean we are doing call for file we do not really care about the
560 # this mean we are doing call for file we do not really care about the
561 # data (eg: added or removed), however this should be a minor overhead
561 # data (eg: added or removed), however this should be a minor overhead
562 # compared to the overall update process calling this.
562 # compared to the overall update process calling this.
563 parentfiledata = None
563 parentfiledata = None
564 if wc_tracked:
564 if wc_tracked:
565 parentfiledata = self._get_filedata(filename)
565 parentfiledata = self._get_filedata(filename)
566
566
567 self._updatedfiles.add(filename)
567 self._updatedfiles.add(filename)
568 self._map.reset_state(
568 self._map.reset_state(
569 filename,
569 filename,
570 wc_tracked,
570 wc_tracked,
571 p1_tracked,
571 p1_tracked,
572 possibly_dirty=possibly_dirty,
572 possibly_dirty=possibly_dirty,
573 parentfiledata=parentfiledata,
573 parentfiledata=parentfiledata,
574 )
574 )
575 if (
575 if (
576 parentfiledata is not None
576 parentfiledata is not None
577 and parentfiledata[2] > self._lastnormaltime
577 and parentfiledata[2] > self._lastnormaltime
578 ):
578 ):
579 # Remember the most recent modification timeslot for status(),
579 # Remember the most recent modification timeslot for status(),
580 # to make sure we won't miss future size-preserving file content
580 # to make sure we won't miss future size-preserving file content
581 # modifications that happen within the same timeslot.
581 # modifications that happen within the same timeslot.
582 self._lastnormaltime = parentfiledata[2]
582 self._lastnormaltime = parentfiledata[2]
583
583
    @requires_parents_change
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_tracked=False,
        merged=False,
        clean_p1=False,
        clean_p2=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the direstates parent changes to keep track
        of what is the file situation in regards to the working copy and its parent.

        This function must be called within a `dirstate.parentchange` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.

        :filename: the file being updated
        :wc_tracked: whether the working copy tracks the file
        :p1_tracked: whether the first parent tracks the file
        :p2_tracked: whether the second parent tracks the file
        :merged: the file results from a merge; incompatible with
            `clean_p1`/`clean_p2` (enforced below)
        :clean_p1:/:clean_p2: the file is presumed clean relative to the
            given parent
        :possibly_dirty: the on-disk content may differ from the recorded
            state, so no fresh stat data should be trusted
        :parentfiledata: optional (mode, size, mtime) tuple; looked up on
            disk when omitted and needed
        """
        if merged and (clean_p1 or clean_p2):
            msg = b'`merged` argument incompatible with `clean_p1`/`clean_p2`'
            raise error.ProgrammingError(msg)

        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees

        self._dirty = True
        self._updatedfiles.add(filename)

        # fresh stat data is only meaningful when the entry will be recorded
        # as clean relative to p1 (no merge / clean_p2 / possibly_dirty case)
        need_parent_file_data = (
            not (possibly_dirty or clean_p2 or merged)
            and wc_tracked
            and p1_tracked
        )

        # this mean we are doing call for file we do not really care about the
        # data (eg: added or removed), however this should be a minor overhead
        # compared to the overall update process calling this.
        if need_parent_file_data:
            if parentfiledata is None:
                parentfiledata = self._get_filedata(filename)
            mtime = parentfiledata[2]

            if mtime > self._lastnormaltime:
                # Remember the most recent modification timeslot for
                # status(), to make sure we won't miss future
                # size-preserving file content modifications that happen
                # within the same timeslot.
                self._lastnormaltime = mtime

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            p2_tracked=p2_tracked,
            merged=merged,
            clean_p1=clean_p1,
            clean_p2=clean_p2,
            possibly_dirty=possibly_dirty,
            parentfiledata=parentfiledata,
        )
        if (
            parentfiledata is not None
            and parentfiledata[2] > self._lastnormaltime
        ):
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = parentfiledata[2]
659
659
660 def _addpath(
660 def _addpath(
661 self,
661 self,
662 f,
662 f,
663 mode=0,
663 mode=0,
664 size=None,
664 size=None,
665 mtime=None,
665 mtime=None,
666 added=False,
666 added=False,
667 merged=False,
667 merged=False,
668 from_p2=False,
668 from_p2=False,
669 possibly_dirty=False,
669 possibly_dirty=False,
670 ):
670 ):
671 entry = self._map.get(f)
671 entry = self._map.get(f)
672 if added or entry is not None and entry.removed:
672 if added or entry is not None and entry.removed:
673 scmutil.checkfilename(f)
673 scmutil.checkfilename(f)
674 if self._map.hastrackeddir(f):
674 if self._map.hastrackeddir(f):
675 msg = _(b'directory %r already in dirstate')
675 msg = _(b'directory %r already in dirstate')
676 msg %= pycompat.bytestr(f)
676 msg %= pycompat.bytestr(f)
677 raise error.Abort(msg)
677 raise error.Abort(msg)
678 # shadows
678 # shadows
679 for d in pathutil.finddirs(f):
679 for d in pathutil.finddirs(f):
680 if self._map.hastrackeddir(d):
680 if self._map.hastrackeddir(d):
681 break
681 break
682 entry = self._map.get(d)
682 entry = self._map.get(d)
683 if entry is not None and not entry.removed:
683 if entry is not None and not entry.removed:
684 msg = _(b'file %r in dirstate clashes with %r')
684 msg = _(b'file %r in dirstate clashes with %r')
685 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
685 msg %= (pycompat.bytestr(d), pycompat.bytestr(f))
686 raise error.Abort(msg)
686 raise error.Abort(msg)
687 self._dirty = True
687 self._dirty = True
688 self._updatedfiles.add(f)
688 self._updatedfiles.add(f)
689 self._map.addfile(
689 self._map.addfile(
690 f,
690 f,
691 mode=mode,
691 mode=mode,
692 size=size,
692 size=size,
693 mtime=mtime,
693 mtime=mtime,
694 added=added,
694 added=added,
695 merged=merged,
695 merged=merged,
696 from_p2=from_p2,
696 from_p2=from_p2,
697 possibly_dirty=possibly_dirty,
697 possibly_dirty=possibly_dirty,
698 )
698 )
699
699
700 def _get_filedata(self, filename):
700 def _get_filedata(self, filename):
701 """returns"""
701 """returns"""
702 s = os.lstat(self._join(filename))
702 s = os.lstat(self._join(filename))
703 mode = s.st_mode
703 mode = s.st_mode
704 size = s.st_size
704 size = s.st_size
705 mtime = s[stat.ST_MTIME]
705 mtime = s[stat.ST_MTIME]
706 return (mode, size, mtime)
706 return (mode, size, mtime)
707
707
708 def _normal(self, f, parentfiledata=None):
708 def _normal(self, f, parentfiledata=None):
709 if parentfiledata:
709 if parentfiledata:
710 (mode, size, mtime) = parentfiledata
710 (mode, size, mtime) = parentfiledata
711 else:
711 else:
712 (mode, size, mtime) = self._get_filedata(f)
712 (mode, size, mtime) = self._get_filedata(f)
713 self._addpath(f, mode=mode, size=size, mtime=mtime)
713 self._addpath(f, mode=mode, size=size, mtime=mtime)
714 self._map.copymap.pop(f, None)
714 self._map.copymap.pop(f, None)
715 if f in self._map.nonnormalset:
715 if f in self._map.nonnormalset:
716 self._map.nonnormalset.remove(f)
716 self._map.nonnormalset.remove(f)
717 if mtime > self._lastnormaltime:
717 if mtime > self._lastnormaltime:
718 # Remember the most recent modification timeslot for status(),
718 # Remember the most recent modification timeslot for status(),
719 # to make sure we won't miss future size-preserving file content
719 # to make sure we won't miss future size-preserving file content
720 # modifications that happen within the same timeslot.
720 # modifications that happen within the same timeslot.
721 self._lastnormaltime = mtime
721 self._lastnormaltime = mtime
722
722
    def _normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self.in_merge:
            # if there is a merge going on and the file was either
            # "merged" or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                # XXX this should probably be dealt with a a lower level
                # (see `merged_removed` and `from_p2_removed`)
                if entry.merged_removed or entry.from_p2_removed:
                    # grab the copy source now: re-adding the file below
                    # would otherwise clobber the copymap entry
                    source = self._map.copymap.get(f)
                    if entry.merged_removed:
                        self._merge(f)
                    elif entry.from_p2_removed:
                        self._otherparent(f)
                    if source is not None:
                        self.copy(source, f)
                    return
                elif entry.merged or entry.from_p2:
                    # already recorded as merged / from-p2: nothing to restore
                    return
        # default path: mark the file tracked but in need of a status check
        self._addpath(f, possibly_dirty=True)
        self._map.copymap.pop(f, None)
746
746
747 def _otherparent(self, f):
747 def _otherparent(self, f):
748 if not self.in_merge:
748 if not self.in_merge:
749 msg = _(b"setting %r to other parent only allowed in merges") % f
749 msg = _(b"setting %r to other parent only allowed in merges") % f
750 raise error.Abort(msg)
750 raise error.Abort(msg)
751 entry = self._map.get(f)
751 entry = self._map.get(f)
752 if entry is not None and entry.tracked:
752 if entry is not None and entry.tracked:
753 # merge-like
753 # merge-like
754 self._addpath(f, merged=True)
754 self._addpath(f, merged=True)
755 else:
755 else:
756 # add-like
756 # add-like
757 self._addpath(f, from_p2=True)
757 self._addpath(f, from_p2=True)
758 self._map.copymap.pop(f, None)
758 self._map.copymap.pop(f, None)
759
759
760 def _add(self, filename):
760 def _add(self, filename):
761 """internal function to mark a file as added"""
761 """internal function to mark a file as added"""
762 self._addpath(filename, added=True)
762 self._addpath(filename, added=True)
763 self._map.copymap.pop(filename, None)
763 self._map.copymap.pop(filename, None)
764
764
    def merge(self, f):
        '''Mark a file merged.

        Deprecated shim: it only emits a deprecation warning (phrased
        according to whether a parent change is pending) and then delegates
        to the internal `_merge`.  Callers should use `update_file` within a
        `parentchange` context instead.
        '''
        if self.pendingparentchange():
            util.nouideprecwarn(
                b"do not use `merge` inside of update/merge context."
                b" Use `update_file`",
                b'6.0',
                stacklevel=2,
            )
        else:
            util.nouideprecwarn(
                b"do not use `merge` outside of update/merge context."
                b"It should have been set by the update/merge code",
                b'6.0',
                stacklevel=2,
            )
        self._merge(f)
782
783 def _merge(self, f):
765 def _merge(self, f):
784 if not self.in_merge:
766 if not self.in_merge:
785 return self._normallookup(f)
767 return self._normallookup(f)
786 return self._otherparent(f)
768 return self._otherparent(f)
787
769
788 def drop(self, f):
770 def drop(self, f):
789 '''Drop a file from the dirstate'''
771 '''Drop a file from the dirstate'''
790 if self.pendingparentchange():
772 if self.pendingparentchange():
791 util.nouideprecwarn(
773 util.nouideprecwarn(
792 b"do not use `drop` inside of update/merge context."
774 b"do not use `drop` inside of update/merge context."
793 b" Use `update_file`",
775 b" Use `update_file`",
794 b'6.0',
776 b'6.0',
795 stacklevel=2,
777 stacklevel=2,
796 )
778 )
797 else:
779 else:
798 util.nouideprecwarn(
780 util.nouideprecwarn(
799 b"do not use `drop` outside of update/merge context."
781 b"do not use `drop` outside of update/merge context."
800 b" Use `set_untracked`",
782 b" Use `set_untracked`",
801 b'6.0',
783 b'6.0',
802 stacklevel=2,
784 stacklevel=2,
803 )
785 )
804 self._drop(f)
786 self._drop(f)
805
787
806 def _drop(self, filename):
788 def _drop(self, filename):
807 """internal function to drop a file from the dirstate"""
789 """internal function to drop a file from the dirstate"""
808 if self._map.dropfile(filename):
790 if self._map.dropfile(filename):
809 self._dirty = True
791 self._dirty = True
810 self._updatedfiles.add(filename)
792 self._updatedfiles.add(filename)
811 self._map.copymap.pop(filename, None)
793 self._map.copymap.pop(filename, None)
812
794
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Resolve the case-folded form of `path` against the filesystem.

        `normed` is the case-normalized form of `path`.  `exists` may be
        passed by callers that already know whether the path exists (None
        triggers an lexists() check).  The folded result is cached in
        `storemap` (a file or directory fold map) — but only when the path
        actually exists on disk.
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            storemap[normed] = folded

        return folded
838
820
839 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
821 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
840 normed = util.normcase(path)
822 normed = util.normcase(path)
841 folded = self._map.filefoldmap.get(normed, None)
823 folded = self._map.filefoldmap.get(normed, None)
842 if folded is None:
824 if folded is None:
843 if isknown:
825 if isknown:
844 folded = path
826 folded = path
845 else:
827 else:
846 folded = self._discoverpath(
828 folded = self._discoverpath(
847 path, normed, ignoremissing, exists, self._map.filefoldmap
829 path, normed, ignoremissing, exists, self._map.filefoldmap
848 )
830 )
849 return folded
831 return folded
850
832
851 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
833 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
852 normed = util.normcase(path)
834 normed = util.normcase(path)
853 folded = self._map.filefoldmap.get(normed, None)
835 folded = self._map.filefoldmap.get(normed, None)
854 if folded is None:
836 if folded is None:
855 folded = self._map.dirfoldmap.get(normed, None)
837 folded = self._map.dirfoldmap.get(normed, None)
856 if folded is None:
838 if folded is None:
857 if isknown:
839 if isknown:
858 folded = path
840 folded = path
859 else:
841 else:
860 # store discovered result in dirfoldmap so that future
842 # store discovered result in dirfoldmap so that future
861 # normalizefile calls don't start matching directories
843 # normalizefile calls don't start matching directories
862 folded = self._discoverpath(
844 folded = self._discoverpath(
863 path, normed, ignoremissing, exists, self._map.dirfoldmap
845 path, normed, ignoremissing, exists, self._map.dirfoldmap
864 )
846 )
865 return folded
847 return folded
866
848
867 def normalize(self, path, isknown=False, ignoremissing=False):
849 def normalize(self, path, isknown=False, ignoremissing=False):
868 """
850 """
869 normalize the case of a pathname when on a casefolding filesystem
851 normalize the case of a pathname when on a casefolding filesystem
870
852
871 isknown specifies whether the filename came from walking the
853 isknown specifies whether the filename came from walking the
872 disk, to avoid extra filesystem access.
854 disk, to avoid extra filesystem access.
873
855
874 If ignoremissing is True, missing path are returned
856 If ignoremissing is True, missing path are returned
875 unchanged. Otherwise, we try harder to normalize possibly
857 unchanged. Otherwise, we try harder to normalize possibly
876 existing path components.
858 existing path components.
877
859
878 The normalized case is determined based on the following precedence:
860 The normalized case is determined based on the following precedence:
879
861
880 - version of name already stored in the dirstate
862 - version of name already stored in the dirstate
881 - version of name stored on disk
863 - version of name stored on disk
882 - version provided via command arguments
864 - version provided via command arguments
883 """
865 """
884
866
885 if self._checkcase:
867 if self._checkcase:
886 return self._normalize(path, isknown, ignoremissing)
868 return self._normalize(path, isknown, ignoremissing)
887 return path
869 return path
888
870
889 def clear(self):
871 def clear(self):
890 self._map.clear()
872 self._map.clear()
891 self._lastnormaltime = 0
873 self._lastnormaltime = 0
892 self._updatedfiles.clear()
874 self._updatedfiles.clear()
893 self._dirty = True
875 self._dirty = True
894
876
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Reset the dirstate to describe `parent` for the given files.

        Without `changedfiles`, the whole dirstate is rebuilt from
        `allfiles`; otherwise only the named files are re-looked-up (if in
        `allfiles`) or dropped.
        """
        if changedfiles is None:
            # Rebuild entire dirstate
            to_lookup = allfiles
            to_drop = []
            # clear() resets _lastnormaltime; preserve it across the rebuild
            lastnormaltime = self._lastnormaltime
            self.clear()
            self._lastnormaltime = lastnormaltime
        elif len(changedfiles) < 10:
            # Avoid turning allfiles into a set, which can be expensive if it's
            # large.
            to_lookup = []
            to_drop = []
            for f in changedfiles:
                if f in allfiles:
                    to_lookup.append(f)
                else:
                    to_drop.append(f)
        else:
            changedfilesset = set(changedfiles)
            to_lookup = changedfilesset & set(allfiles)
            to_drop = changedfilesset - to_lookup

        if self._origpl is None:
            # remember the pre-rebuild parents so _writedirstate can notify
            # parent-change callbacks
            self._origpl = self._pl
        self._map.setparents(parent, self._nodeconstants.nullid)

        for f in to_lookup:
            self._normallookup(f)
        for f in to_drop:
            self._drop(f)

        self._dirty = True
928
910
929 def identity(self):
911 def identity(self):
930 """Return identity of dirstate itself to detect changing in storage
912 """Return identity of dirstate itself to detect changing in storage
931
913
932 If identity of previous dirstate is equal to this, writing
914 If identity of previous dirstate is equal to this, writing
933 changes based on the former dirstate out can keep consistency.
915 changes based on the former dirstate out can keep consistency.
934 """
916 """
935 return self._map.identity
917 return self._map.identity
936
918
    def write(self, tr):
        """Flush in-memory dirstate changes to disk.

        When a transaction `tr` is given, the write is registered as a
        transaction file generator (and thus may be delayed); otherwise the
        dirstate file is rewritten immediately.  A no-op when nothing is
        dirty.
        """
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            self._map.clearambiguoustimes(self._updatedfiles, now)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator(
                b'dirstate',
                (self._filename,),
                lambda f: self._writedirstate(tr, f),
                location=b'plain',
            )
            return

        # no transaction: write atomically right now
        st = self._opener(filename, b"w", atomictemp=True, checkambig=True)
        self._writedirstate(tr, st)
968
950
969 def addparentchangecallback(self, category, callback):
951 def addparentchangecallback(self, category, callback):
970 """add a callback to be called when the wd parents are changed
952 """add a callback to be called when the wd parents are changed
971
953
972 Callback will be called with the following arguments:
954 Callback will be called with the following arguments:
973 dirstate, (oldp1, oldp2), (newp1, newp2)
955 dirstate, (oldp1, oldp2), (newp1, newp2)
974
956
975 Category is a unique identifier to allow overwriting an old callback
957 Category is a unique identifier to allow overwriting an old callback
976 with a newer callback.
958 with a newer callback.
977 """
959 """
978 self._plchangecallbacks[category] = callback
960 self._plchangecallbacks[category] = callback
979
961
    def _writedirstate(self, tr, st):
        """Serialize the dirstate map into the open file object `st`."""
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(
                pycompat.iteritems(self._plchangecallbacks)
            ):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint(b'debug', b'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            for f, e in pycompat.iteritems(self._map):
                if e.need_delay(now):
                    import time  # to avoid useless import

                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end  # trust our estimate that the end is near now
                    break

        self._map.write(tr, st, now)
        # writing out makes the in-memory state clean again
        self._lastnormaltime = 0
        self._dirty = False
1013
995
1014 def _dirignore(self, f):
996 def _dirignore(self, f):
1015 if self._ignore(f):
997 if self._ignore(f):
1016 return True
998 return True
1017 for p in pathutil.finddirs(f):
999 for p in pathutil.finddirs(f):
1018 if self._ignore(p):
1000 if self._ignore(p):
1019 return True
1001 return True
1020 return False
1002 return False
1021
1003
1022 def _ignorefiles(self):
1004 def _ignorefiles(self):
1023 files = []
1005 files = []
1024 if os.path.exists(self._join(b'.hgignore')):
1006 if os.path.exists(self._join(b'.hgignore')):
1025 files.append(self._join(b'.hgignore'))
1007 files.append(self._join(b'.hgignore'))
1026 for name, path in self._ui.configitems(b"ui"):
1008 for name, path in self._ui.configitems(b"ui"):
1027 if name == b'ignore' or name.startswith(b'ignore.'):
1009 if name == b'ignore' or name.startswith(b'ignore.'):
1028 # we need to use os.path.join here rather than self._join
1010 # we need to use os.path.join here rather than self._join
1029 # because path is arbitrary and user-specified
1011 # because path is arbitrary and user-specified
1030 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1012 files.append(os.path.join(self._rootdir, util.expandpath(path)))
1031 return files
1013 return files
1032
1014
    def _ignorefileandline(self, f):
        """Return ``(ignorefile, lineno, line)`` for the rule ignoring `f`.

        Scans all ignore files breadth-first, following `subinclude`
        patterns into further files (each visited at most once).  Returns
        ``(None, -1, b"")`` when no pattern matches.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(
                i, self._ui.warn, sourceinfo=True
            )
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, b'glob')
                if kind == b"subinclude":
                    # queue the referenced ignore file for a later pass
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(
                    self._root, b'', [], [pattern], warn=self._ui.warn
                )
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, b"")
1054
1036
1055 def _walkexplicit(self, match, subrepos):
1037 def _walkexplicit(self, match, subrepos):
1056 """Get stat data about the files explicitly specified by match.
1038 """Get stat data about the files explicitly specified by match.
1057
1039
1058 Return a triple (results, dirsfound, dirsnotfound).
1040 Return a triple (results, dirsfound, dirsnotfound).
1059 - results is a mapping from filename to stat result. It also contains
1041 - results is a mapping from filename to stat result. It also contains
1060 listings mapping subrepos and .hg to None.
1042 listings mapping subrepos and .hg to None.
1061 - dirsfound is a list of files found to be directories.
1043 - dirsfound is a list of files found to be directories.
1062 - dirsnotfound is a list of files that the dirstate thinks are
1044 - dirsnotfound is a list of files that the dirstate thinks are
1063 directories and that were not found."""
1045 directories and that were not found."""
1064
1046
1065 def badtype(mode):
1047 def badtype(mode):
1066 kind = _(b'unknown')
1048 kind = _(b'unknown')
1067 if stat.S_ISCHR(mode):
1049 if stat.S_ISCHR(mode):
1068 kind = _(b'character device')
1050 kind = _(b'character device')
1069 elif stat.S_ISBLK(mode):
1051 elif stat.S_ISBLK(mode):
1070 kind = _(b'block device')
1052 kind = _(b'block device')
1071 elif stat.S_ISFIFO(mode):
1053 elif stat.S_ISFIFO(mode):
1072 kind = _(b'fifo')
1054 kind = _(b'fifo')
1073 elif stat.S_ISSOCK(mode):
1055 elif stat.S_ISSOCK(mode):
1074 kind = _(b'socket')
1056 kind = _(b'socket')
1075 elif stat.S_ISDIR(mode):
1057 elif stat.S_ISDIR(mode):
1076 kind = _(b'directory')
1058 kind = _(b'directory')
1077 return _(b'unsupported file type (type is %s)') % kind
1059 return _(b'unsupported file type (type is %s)') % kind
1078
1060
1079 badfn = match.bad
1061 badfn = match.bad
1080 dmap = self._map
1062 dmap = self._map
1081 lstat = os.lstat
1063 lstat = os.lstat
1082 getkind = stat.S_IFMT
1064 getkind = stat.S_IFMT
1083 dirkind = stat.S_IFDIR
1065 dirkind = stat.S_IFDIR
1084 regkind = stat.S_IFREG
1066 regkind = stat.S_IFREG
1085 lnkkind = stat.S_IFLNK
1067 lnkkind = stat.S_IFLNK
1086 join = self._join
1068 join = self._join
1087 dirsfound = []
1069 dirsfound = []
1088 foundadd = dirsfound.append
1070 foundadd = dirsfound.append
1089 dirsnotfound = []
1071 dirsnotfound = []
1090 notfoundadd = dirsnotfound.append
1072 notfoundadd = dirsnotfound.append
1091
1073
1092 if not match.isexact() and self._checkcase:
1074 if not match.isexact() and self._checkcase:
1093 normalize = self._normalize
1075 normalize = self._normalize
1094 else:
1076 else:
1095 normalize = None
1077 normalize = None
1096
1078
1097 files = sorted(match.files())
1079 files = sorted(match.files())
1098 subrepos.sort()
1080 subrepos.sort()
1099 i, j = 0, 0
1081 i, j = 0, 0
1100 while i < len(files) and j < len(subrepos):
1082 while i < len(files) and j < len(subrepos):
1101 subpath = subrepos[j] + b"/"
1083 subpath = subrepos[j] + b"/"
1102 if files[i] < subpath:
1084 if files[i] < subpath:
1103 i += 1
1085 i += 1
1104 continue
1086 continue
1105 while i < len(files) and files[i].startswith(subpath):
1087 while i < len(files) and files[i].startswith(subpath):
1106 del files[i]
1088 del files[i]
1107 j += 1
1089 j += 1
1108
1090
1109 if not files or b'' in files:
1091 if not files or b'' in files:
1110 files = [b'']
1092 files = [b'']
1111 # constructing the foldmap is expensive, so don't do it for the
1093 # constructing the foldmap is expensive, so don't do it for the
1112 # common case where files is ['']
1094 # common case where files is ['']
1113 normalize = None
1095 normalize = None
1114 results = dict.fromkeys(subrepos)
1096 results = dict.fromkeys(subrepos)
1115 results[b'.hg'] = None
1097 results[b'.hg'] = None
1116
1098
1117 for ff in files:
1099 for ff in files:
1118 if normalize:
1100 if normalize:
1119 nf = normalize(ff, False, True)
1101 nf = normalize(ff, False, True)
1120 else:
1102 else:
1121 nf = ff
1103 nf = ff
1122 if nf in results:
1104 if nf in results:
1123 continue
1105 continue
1124
1106
1125 try:
1107 try:
1126 st = lstat(join(nf))
1108 st = lstat(join(nf))
1127 kind = getkind(st.st_mode)
1109 kind = getkind(st.st_mode)
1128 if kind == dirkind:
1110 if kind == dirkind:
1129 if nf in dmap:
1111 if nf in dmap:
1130 # file replaced by dir on disk but still in dirstate
1112 # file replaced by dir on disk but still in dirstate
1131 results[nf] = None
1113 results[nf] = None
1132 foundadd((nf, ff))
1114 foundadd((nf, ff))
1133 elif kind == regkind or kind == lnkkind:
1115 elif kind == regkind or kind == lnkkind:
1134 results[nf] = st
1116 results[nf] = st
1135 else:
1117 else:
1136 badfn(ff, badtype(kind))
1118 badfn(ff, badtype(kind))
1137 if nf in dmap:
1119 if nf in dmap:
1138 results[nf] = None
1120 results[nf] = None
1139 except OSError as inst: # nf not found on disk - it is dirstate only
1121 except OSError as inst: # nf not found on disk - it is dirstate only
1140 if nf in dmap: # does it exactly match a missing file?
1122 if nf in dmap: # does it exactly match a missing file?
1141 results[nf] = None
1123 results[nf] = None
1142 else: # does it match a missing directory?
1124 else: # does it match a missing directory?
1143 if self._map.hasdir(nf):
1125 if self._map.hasdir(nf):
1144 notfoundadd(nf)
1126 notfoundadd(nf)
1145 else:
1127 else:
1146 badfn(ff, encoding.strtolocal(inst.strerror))
1128 badfn(ff, encoding.strtolocal(inst.strerror))
1147
1129
1148 # match.files() may contain explicitly-specified paths that shouldn't
1130 # match.files() may contain explicitly-specified paths that shouldn't
1149 # be taken; drop them from the list of files found. dirsfound/notfound
1131 # be taken; drop them from the list of files found. dirsfound/notfound
1150 # aren't filtered here because they will be tested later.
1132 # aren't filtered here because they will be tested later.
1151 if match.anypats():
1133 if match.anypats():
1152 for f in list(results):
1134 for f in list(results):
1153 if f == b'.hg' or f in subrepos:
1135 if f == b'.hg' or f in subrepos:
1154 # keep sentinel to disable further out-of-repo walks
1136 # keep sentinel to disable further out-of-repo walks
1155 continue
1137 continue
1156 if not match(f):
1138 if not match(f):
1157 del results[f]
1139 del results[f]
1158
1140
1159 # Case insensitive filesystems cannot rely on lstat() failing to detect
1141 # Case insensitive filesystems cannot rely on lstat() failing to detect
1160 # a case-only rename. Prune the stat object for any file that does not
1142 # a case-only rename. Prune the stat object for any file that does not
1161 # match the case in the filesystem, if there are multiple files that
1143 # match the case in the filesystem, if there are multiple files that
1162 # normalize to the same path.
1144 # normalize to the same path.
1163 if match.isexact() and self._checkcase:
1145 if match.isexact() and self._checkcase:
1164 normed = {}
1146 normed = {}
1165
1147
1166 for f, st in pycompat.iteritems(results):
1148 for f, st in pycompat.iteritems(results):
1167 if st is None:
1149 if st is None:
1168 continue
1150 continue
1169
1151
1170 nc = util.normcase(f)
1152 nc = util.normcase(f)
1171 paths = normed.get(nc)
1153 paths = normed.get(nc)
1172
1154
1173 if paths is None:
1155 if paths is None:
1174 paths = set()
1156 paths = set()
1175 normed[nc] = paths
1157 normed[nc] = paths
1176
1158
1177 paths.add(f)
1159 paths.add(f)
1178
1160
1179 for norm, paths in pycompat.iteritems(normed):
1161 for norm, paths in pycompat.iteritems(normed):
1180 if len(paths) > 1:
1162 if len(paths) > 1:
1181 for path in paths:
1163 for path in paths:
1182 folded = self._discoverpath(
1164 folded = self._discoverpath(
1183 path, norm, True, None, self._map.dirfoldmap
1165 path, norm, True, None, self._map.dirfoldmap
1184 )
1166 )
1185 if path != folded:
1167 if path != folded:
1186 results[path] = None
1168 results[path] = None
1187
1169
1188 return results, dirsfound, dirsnotfound
1170 return results, dirsfound, dirsnotfound
1189
1171
1190 def walk(self, match, subrepos, unknown, ignored, full=True):
1172 def walk(self, match, subrepos, unknown, ignored, full=True):
1191 """
1173 """
1192 Walk recursively through the directory tree, finding all files
1174 Walk recursively through the directory tree, finding all files
1193 matched by match.
1175 matched by match.
1194
1176
1195 If full is False, maybe skip some known-clean files.
1177 If full is False, maybe skip some known-clean files.
1196
1178
1197 Return a dict mapping filename to stat-like object (either
1179 Return a dict mapping filename to stat-like object (either
1198 mercurial.osutil.stat instance or return value of os.stat()).
1180 mercurial.osutil.stat instance or return value of os.stat()).
1199
1181
1200 """
1182 """
1201 # full is a flag that extensions that hook into walk can use -- this
1183 # full is a flag that extensions that hook into walk can use -- this
1202 # implementation doesn't use it at all. This satisfies the contract
1184 # implementation doesn't use it at all. This satisfies the contract
1203 # because we only guarantee a "maybe".
1185 # because we only guarantee a "maybe".
1204
1186
1205 if ignored:
1187 if ignored:
1206 ignore = util.never
1188 ignore = util.never
1207 dirignore = util.never
1189 dirignore = util.never
1208 elif unknown:
1190 elif unknown:
1209 ignore = self._ignore
1191 ignore = self._ignore
1210 dirignore = self._dirignore
1192 dirignore = self._dirignore
1211 else:
1193 else:
1212 # if not unknown and not ignored, drop dir recursion and step 2
1194 # if not unknown and not ignored, drop dir recursion and step 2
1213 ignore = util.always
1195 ignore = util.always
1214 dirignore = util.always
1196 dirignore = util.always
1215
1197
1216 matchfn = match.matchfn
1198 matchfn = match.matchfn
1217 matchalways = match.always()
1199 matchalways = match.always()
1218 matchtdir = match.traversedir
1200 matchtdir = match.traversedir
1219 dmap = self._map
1201 dmap = self._map
1220 listdir = util.listdir
1202 listdir = util.listdir
1221 lstat = os.lstat
1203 lstat = os.lstat
1222 dirkind = stat.S_IFDIR
1204 dirkind = stat.S_IFDIR
1223 regkind = stat.S_IFREG
1205 regkind = stat.S_IFREG
1224 lnkkind = stat.S_IFLNK
1206 lnkkind = stat.S_IFLNK
1225 join = self._join
1207 join = self._join
1226
1208
1227 exact = skipstep3 = False
1209 exact = skipstep3 = False
1228 if match.isexact(): # match.exact
1210 if match.isexact(): # match.exact
1229 exact = True
1211 exact = True
1230 dirignore = util.always # skip step 2
1212 dirignore = util.always # skip step 2
1231 elif match.prefix(): # match.match, no patterns
1213 elif match.prefix(): # match.match, no patterns
1232 skipstep3 = True
1214 skipstep3 = True
1233
1215
1234 if not exact and self._checkcase:
1216 if not exact and self._checkcase:
1235 normalize = self._normalize
1217 normalize = self._normalize
1236 normalizefile = self._normalizefile
1218 normalizefile = self._normalizefile
1237 skipstep3 = False
1219 skipstep3 = False
1238 else:
1220 else:
1239 normalize = self._normalize
1221 normalize = self._normalize
1240 normalizefile = None
1222 normalizefile = None
1241
1223
1242 # step 1: find all explicit files
1224 # step 1: find all explicit files
1243 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1225 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1244 if matchtdir:
1226 if matchtdir:
1245 for d in work:
1227 for d in work:
1246 matchtdir(d[0])
1228 matchtdir(d[0])
1247 for d in dirsnotfound:
1229 for d in dirsnotfound:
1248 matchtdir(d)
1230 matchtdir(d)
1249
1231
1250 skipstep3 = skipstep3 and not (work or dirsnotfound)
1232 skipstep3 = skipstep3 and not (work or dirsnotfound)
1251 work = [d for d in work if not dirignore(d[0])]
1233 work = [d for d in work if not dirignore(d[0])]
1252
1234
1253 # step 2: visit subdirectories
1235 # step 2: visit subdirectories
1254 def traverse(work, alreadynormed):
1236 def traverse(work, alreadynormed):
1255 wadd = work.append
1237 wadd = work.append
1256 while work:
1238 while work:
1257 tracing.counter('dirstate.walk work', len(work))
1239 tracing.counter('dirstate.walk work', len(work))
1258 nd = work.pop()
1240 nd = work.pop()
1259 visitentries = match.visitchildrenset(nd)
1241 visitentries = match.visitchildrenset(nd)
1260 if not visitentries:
1242 if not visitentries:
1261 continue
1243 continue
1262 if visitentries == b'this' or visitentries == b'all':
1244 if visitentries == b'this' or visitentries == b'all':
1263 visitentries = None
1245 visitentries = None
1264 skip = None
1246 skip = None
1265 if nd != b'':
1247 if nd != b'':
1266 skip = b'.hg'
1248 skip = b'.hg'
1267 try:
1249 try:
1268 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1250 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1269 entries = listdir(join(nd), stat=True, skip=skip)
1251 entries = listdir(join(nd), stat=True, skip=skip)
1270 except OSError as inst:
1252 except OSError as inst:
1271 if inst.errno in (errno.EACCES, errno.ENOENT):
1253 if inst.errno in (errno.EACCES, errno.ENOENT):
1272 match.bad(
1254 match.bad(
1273 self.pathto(nd), encoding.strtolocal(inst.strerror)
1255 self.pathto(nd), encoding.strtolocal(inst.strerror)
1274 )
1256 )
1275 continue
1257 continue
1276 raise
1258 raise
1277 for f, kind, st in entries:
1259 for f, kind, st in entries:
1278 # Some matchers may return files in the visitentries set,
1260 # Some matchers may return files in the visitentries set,
1279 # instead of 'this', if the matcher explicitly mentions them
1261 # instead of 'this', if the matcher explicitly mentions them
1280 # and is not an exactmatcher. This is acceptable; we do not
1262 # and is not an exactmatcher. This is acceptable; we do not
1281 # make any hard assumptions about file-or-directory below
1263 # make any hard assumptions about file-or-directory below
1282 # based on the presence of `f` in visitentries. If
1264 # based on the presence of `f` in visitentries. If
1283 # visitchildrenset returned a set, we can always skip the
1265 # visitchildrenset returned a set, we can always skip the
1284 # entries *not* in the set it provided regardless of whether
1266 # entries *not* in the set it provided regardless of whether
1285 # they're actually a file or a directory.
1267 # they're actually a file or a directory.
1286 if visitentries and f not in visitentries:
1268 if visitentries and f not in visitentries:
1287 continue
1269 continue
1288 if normalizefile:
1270 if normalizefile:
1289 # even though f might be a directory, we're only
1271 # even though f might be a directory, we're only
1290 # interested in comparing it to files currently in the
1272 # interested in comparing it to files currently in the
1291 # dmap -- therefore normalizefile is enough
1273 # dmap -- therefore normalizefile is enough
1292 nf = normalizefile(
1274 nf = normalizefile(
1293 nd and (nd + b"/" + f) or f, True, True
1275 nd and (nd + b"/" + f) or f, True, True
1294 )
1276 )
1295 else:
1277 else:
1296 nf = nd and (nd + b"/" + f) or f
1278 nf = nd and (nd + b"/" + f) or f
1297 if nf not in results:
1279 if nf not in results:
1298 if kind == dirkind:
1280 if kind == dirkind:
1299 if not ignore(nf):
1281 if not ignore(nf):
1300 if matchtdir:
1282 if matchtdir:
1301 matchtdir(nf)
1283 matchtdir(nf)
1302 wadd(nf)
1284 wadd(nf)
1303 if nf in dmap and (matchalways or matchfn(nf)):
1285 if nf in dmap and (matchalways or matchfn(nf)):
1304 results[nf] = None
1286 results[nf] = None
1305 elif kind == regkind or kind == lnkkind:
1287 elif kind == regkind or kind == lnkkind:
1306 if nf in dmap:
1288 if nf in dmap:
1307 if matchalways or matchfn(nf):
1289 if matchalways or matchfn(nf):
1308 results[nf] = st
1290 results[nf] = st
1309 elif (matchalways or matchfn(nf)) and not ignore(
1291 elif (matchalways or matchfn(nf)) and not ignore(
1310 nf
1292 nf
1311 ):
1293 ):
1312 # unknown file -- normalize if necessary
1294 # unknown file -- normalize if necessary
1313 if not alreadynormed:
1295 if not alreadynormed:
1314 nf = normalize(nf, False, True)
1296 nf = normalize(nf, False, True)
1315 results[nf] = st
1297 results[nf] = st
1316 elif nf in dmap and (matchalways or matchfn(nf)):
1298 elif nf in dmap and (matchalways or matchfn(nf)):
1317 results[nf] = None
1299 results[nf] = None
1318
1300
1319 for nd, d in work:
1301 for nd, d in work:
1320 # alreadynormed means that processwork doesn't have to do any
1302 # alreadynormed means that processwork doesn't have to do any
1321 # expensive directory normalization
1303 # expensive directory normalization
1322 alreadynormed = not normalize or nd == d
1304 alreadynormed = not normalize or nd == d
1323 traverse([d], alreadynormed)
1305 traverse([d], alreadynormed)
1324
1306
1325 for s in subrepos:
1307 for s in subrepos:
1326 del results[s]
1308 del results[s]
1327 del results[b'.hg']
1309 del results[b'.hg']
1328
1310
1329 # step 3: visit remaining files from dmap
1311 # step 3: visit remaining files from dmap
1330 if not skipstep3 and not exact:
1312 if not skipstep3 and not exact:
1331 # If a dmap file is not in results yet, it was either
1313 # If a dmap file is not in results yet, it was either
1332 # a) not matching matchfn b) ignored, c) missing, or d) under a
1314 # a) not matching matchfn b) ignored, c) missing, or d) under a
1333 # symlink directory.
1315 # symlink directory.
1334 if not results and matchalways:
1316 if not results and matchalways:
1335 visit = [f for f in dmap]
1317 visit = [f for f in dmap]
1336 else:
1318 else:
1337 visit = [f for f in dmap if f not in results and matchfn(f)]
1319 visit = [f for f in dmap if f not in results and matchfn(f)]
1338 visit.sort()
1320 visit.sort()
1339
1321
1340 if unknown:
1322 if unknown:
1341 # unknown == True means we walked all dirs under the roots
1323 # unknown == True means we walked all dirs under the roots
1342 # that wasn't ignored, and everything that matched was stat'ed
1324 # that wasn't ignored, and everything that matched was stat'ed
1343 # and is already in results.
1325 # and is already in results.
1344 # The rest must thus be ignored or under a symlink.
1326 # The rest must thus be ignored or under a symlink.
1345 audit_path = pathutil.pathauditor(self._root, cached=True)
1327 audit_path = pathutil.pathauditor(self._root, cached=True)
1346
1328
1347 for nf in iter(visit):
1329 for nf in iter(visit):
1348 # If a stat for the same file was already added with a
1330 # If a stat for the same file was already added with a
1349 # different case, don't add one for this, since that would
1331 # different case, don't add one for this, since that would
1350 # make it appear as if the file exists under both names
1332 # make it appear as if the file exists under both names
1351 # on disk.
1333 # on disk.
1352 if (
1334 if (
1353 normalizefile
1335 normalizefile
1354 and normalizefile(nf, True, True) in results
1336 and normalizefile(nf, True, True) in results
1355 ):
1337 ):
1356 results[nf] = None
1338 results[nf] = None
1357 # Report ignored items in the dmap as long as they are not
1339 # Report ignored items in the dmap as long as they are not
1358 # under a symlink directory.
1340 # under a symlink directory.
1359 elif audit_path.check(nf):
1341 elif audit_path.check(nf):
1360 try:
1342 try:
1361 results[nf] = lstat(join(nf))
1343 results[nf] = lstat(join(nf))
1362 # file was just ignored, no links, and exists
1344 # file was just ignored, no links, and exists
1363 except OSError:
1345 except OSError:
1364 # file doesn't exist
1346 # file doesn't exist
1365 results[nf] = None
1347 results[nf] = None
1366 else:
1348 else:
1367 # It's either missing or under a symlink directory
1349 # It's either missing or under a symlink directory
1368 # which we in this case report as missing
1350 # which we in this case report as missing
1369 results[nf] = None
1351 results[nf] = None
1370 else:
1352 else:
1371 # We may not have walked the full directory tree above,
1353 # We may not have walked the full directory tree above,
1372 # so stat and check everything we missed.
1354 # so stat and check everything we missed.
1373 iv = iter(visit)
1355 iv = iter(visit)
1374 for st in util.statfiles([join(i) for i in visit]):
1356 for st in util.statfiles([join(i) for i in visit]):
1375 results[next(iv)] = st
1357 results[next(iv)] = st
1376 return results
1358 return results
1377
1359
1378 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1360 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1379 # Force Rayon (Rust parallelism library) to respect the number of
1361 # Force Rayon (Rust parallelism library) to respect the number of
1380 # workers. This is a temporary workaround until Rust code knows
1362 # workers. This is a temporary workaround until Rust code knows
1381 # how to read the config file.
1363 # how to read the config file.
1382 numcpus = self._ui.configint(b"worker", b"numcpus")
1364 numcpus = self._ui.configint(b"worker", b"numcpus")
1383 if numcpus is not None:
1365 if numcpus is not None:
1384 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1366 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1385
1367
1386 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1368 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1387 if not workers_enabled:
1369 if not workers_enabled:
1388 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1370 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1389
1371
1390 (
1372 (
1391 lookup,
1373 lookup,
1392 modified,
1374 modified,
1393 added,
1375 added,
1394 removed,
1376 removed,
1395 deleted,
1377 deleted,
1396 clean,
1378 clean,
1397 ignored,
1379 ignored,
1398 unknown,
1380 unknown,
1399 warnings,
1381 warnings,
1400 bad,
1382 bad,
1401 traversed,
1383 traversed,
1402 dirty,
1384 dirty,
1403 ) = rustmod.status(
1385 ) = rustmod.status(
1404 self._map._rustmap,
1386 self._map._rustmap,
1405 matcher,
1387 matcher,
1406 self._rootdir,
1388 self._rootdir,
1407 self._ignorefiles(),
1389 self._ignorefiles(),
1408 self._checkexec,
1390 self._checkexec,
1409 self._lastnormaltime,
1391 self._lastnormaltime,
1410 bool(list_clean),
1392 bool(list_clean),
1411 bool(list_ignored),
1393 bool(list_ignored),
1412 bool(list_unknown),
1394 bool(list_unknown),
1413 bool(matcher.traversedir),
1395 bool(matcher.traversedir),
1414 )
1396 )
1415
1397
1416 self._dirty |= dirty
1398 self._dirty |= dirty
1417
1399
1418 if matcher.traversedir:
1400 if matcher.traversedir:
1419 for dir in traversed:
1401 for dir in traversed:
1420 matcher.traversedir(dir)
1402 matcher.traversedir(dir)
1421
1403
1422 if self._ui.warn:
1404 if self._ui.warn:
1423 for item in warnings:
1405 for item in warnings:
1424 if isinstance(item, tuple):
1406 if isinstance(item, tuple):
1425 file_path, syntax = item
1407 file_path, syntax = item
1426 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1408 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1427 file_path,
1409 file_path,
1428 syntax,
1410 syntax,
1429 )
1411 )
1430 self._ui.warn(msg)
1412 self._ui.warn(msg)
1431 else:
1413 else:
1432 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1414 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1433 self._ui.warn(
1415 self._ui.warn(
1434 msg
1416 msg
1435 % (
1417 % (
1436 pathutil.canonpath(
1418 pathutil.canonpath(
1437 self._rootdir, self._rootdir, item
1419 self._rootdir, self._rootdir, item
1438 ),
1420 ),
1439 b"No such file or directory",
1421 b"No such file or directory",
1440 )
1422 )
1441 )
1423 )
1442
1424
1443 for (fn, message) in bad:
1425 for (fn, message) in bad:
1444 matcher.bad(fn, encoding.strtolocal(message))
1426 matcher.bad(fn, encoding.strtolocal(message))
1445
1427
1446 status = scmutil.status(
1428 status = scmutil.status(
1447 modified=modified,
1429 modified=modified,
1448 added=added,
1430 added=added,
1449 removed=removed,
1431 removed=removed,
1450 deleted=deleted,
1432 deleted=deleted,
1451 unknown=unknown,
1433 unknown=unknown,
1452 ignored=ignored,
1434 ignored=ignored,
1453 clean=clean,
1435 clean=clean,
1454 )
1436 )
1455 return (lookup, status)
1437 return (lookup, status)
1456
1438
1457 def status(self, match, subrepos, ignored, clean, unknown):
1439 def status(self, match, subrepos, ignored, clean, unknown):
1458 """Determine the status of the working copy relative to the
1440 """Determine the status of the working copy relative to the
1459 dirstate and return a pair of (unsure, status), where status is of type
1441 dirstate and return a pair of (unsure, status), where status is of type
1460 scmutil.status and:
1442 scmutil.status and:
1461
1443
1462 unsure:
1444 unsure:
1463 files that might have been modified since the dirstate was
1445 files that might have been modified since the dirstate was
1464 written, but need to be read to be sure (size is the same
1446 written, but need to be read to be sure (size is the same
1465 but mtime differs)
1447 but mtime differs)
1466 status.modified:
1448 status.modified:
1467 files that have definitely been modified since the dirstate
1449 files that have definitely been modified since the dirstate
1468 was written (different size or mode)
1450 was written (different size or mode)
1469 status.clean:
1451 status.clean:
1470 files that have definitely not been modified since the
1452 files that have definitely not been modified since the
1471 dirstate was written
1453 dirstate was written
1472 """
1454 """
1473 listignored, listclean, listunknown = ignored, clean, unknown
1455 listignored, listclean, listunknown = ignored, clean, unknown
1474 lookup, modified, added, unknown, ignored = [], [], [], [], []
1456 lookup, modified, added, unknown, ignored = [], [], [], [], []
1475 removed, deleted, clean = [], [], []
1457 removed, deleted, clean = [], [], []
1476
1458
1477 dmap = self._map
1459 dmap = self._map
1478 dmap.preload()
1460 dmap.preload()
1479
1461
1480 use_rust = True
1462 use_rust = True
1481
1463
1482 allowed_matchers = (
1464 allowed_matchers = (
1483 matchmod.alwaysmatcher,
1465 matchmod.alwaysmatcher,
1484 matchmod.exactmatcher,
1466 matchmod.exactmatcher,
1485 matchmod.includematcher,
1467 matchmod.includematcher,
1486 )
1468 )
1487
1469
1488 if rustmod is None:
1470 if rustmod is None:
1489 use_rust = False
1471 use_rust = False
1490 elif self._checkcase:
1472 elif self._checkcase:
1491 # Case-insensitive filesystems are not handled yet
1473 # Case-insensitive filesystems are not handled yet
1492 use_rust = False
1474 use_rust = False
1493 elif subrepos:
1475 elif subrepos:
1494 use_rust = False
1476 use_rust = False
1495 elif sparse.enabled:
1477 elif sparse.enabled:
1496 use_rust = False
1478 use_rust = False
1497 elif not isinstance(match, allowed_matchers):
1479 elif not isinstance(match, allowed_matchers):
1498 # Some matchers have yet to be implemented
1480 # Some matchers have yet to be implemented
1499 use_rust = False
1481 use_rust = False
1500
1482
1501 if use_rust:
1483 if use_rust:
1502 try:
1484 try:
1503 return self._rust_status(
1485 return self._rust_status(
1504 match, listclean, listignored, listunknown
1486 match, listclean, listignored, listunknown
1505 )
1487 )
1506 except rustmod.FallbackError:
1488 except rustmod.FallbackError:
1507 pass
1489 pass
1508
1490
1509 def noop(f):
1491 def noop(f):
1510 pass
1492 pass
1511
1493
1512 dcontains = dmap.__contains__
1494 dcontains = dmap.__contains__
1513 dget = dmap.__getitem__
1495 dget = dmap.__getitem__
1514 ladd = lookup.append # aka "unsure"
1496 ladd = lookup.append # aka "unsure"
1515 madd = modified.append
1497 madd = modified.append
1516 aadd = added.append
1498 aadd = added.append
1517 uadd = unknown.append if listunknown else noop
1499 uadd = unknown.append if listunknown else noop
1518 iadd = ignored.append if listignored else noop
1500 iadd = ignored.append if listignored else noop
1519 radd = removed.append
1501 radd = removed.append
1520 dadd = deleted.append
1502 dadd = deleted.append
1521 cadd = clean.append if listclean else noop
1503 cadd = clean.append if listclean else noop
1522 mexact = match.exact
1504 mexact = match.exact
1523 dirignore = self._dirignore
1505 dirignore = self._dirignore
1524 checkexec = self._checkexec
1506 checkexec = self._checkexec
1525 copymap = self._map.copymap
1507 copymap = self._map.copymap
1526 lastnormaltime = self._lastnormaltime
1508 lastnormaltime = self._lastnormaltime
1527
1509
1528 # We need to do full walks when either
1510 # We need to do full walks when either
1529 # - we're listing all clean files, or
1511 # - we're listing all clean files, or
1530 # - match.traversedir does something, because match.traversedir should
1512 # - match.traversedir does something, because match.traversedir should
1531 # be called for every dir in the working dir
1513 # be called for every dir in the working dir
1532 full = listclean or match.traversedir is not None
1514 full = listclean or match.traversedir is not None
1533 for fn, st in pycompat.iteritems(
1515 for fn, st in pycompat.iteritems(
1534 self.walk(match, subrepos, listunknown, listignored, full=full)
1516 self.walk(match, subrepos, listunknown, listignored, full=full)
1535 ):
1517 ):
1536 if not dcontains(fn):
1518 if not dcontains(fn):
1537 if (listignored or mexact(fn)) and dirignore(fn):
1519 if (listignored or mexact(fn)) and dirignore(fn):
1538 if listignored:
1520 if listignored:
1539 iadd(fn)
1521 iadd(fn)
1540 else:
1522 else:
1541 uadd(fn)
1523 uadd(fn)
1542 continue
1524 continue
1543
1525
1544 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1526 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1545 # written like that for performance reasons. dmap[fn] is not a
1527 # written like that for performance reasons. dmap[fn] is not a
1546 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1528 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1547 # opcode has fast paths when the value to be unpacked is a tuple or
1529 # opcode has fast paths when the value to be unpacked is a tuple or
1548 # a list, but falls back to creating a full-fledged iterator in
1530 # a list, but falls back to creating a full-fledged iterator in
1549 # general. That is much slower than simply accessing and storing the
1531 # general. That is much slower than simply accessing and storing the
1550 # tuple members one by one.
1532 # tuple members one by one.
1551 t = dget(fn)
1533 t = dget(fn)
1552 mode = t.mode
1534 mode = t.mode
1553 size = t.size
1535 size = t.size
1554 time = t.mtime
1536 time = t.mtime
1555
1537
1556 if not st and t.tracked:
1538 if not st and t.tracked:
1557 dadd(fn)
1539 dadd(fn)
1558 elif t.merged:
1540 elif t.merged:
1559 madd(fn)
1541 madd(fn)
1560 elif t.added:
1542 elif t.added:
1561 aadd(fn)
1543 aadd(fn)
1562 elif t.removed:
1544 elif t.removed:
1563 radd(fn)
1545 radd(fn)
1564 elif t.tracked:
1546 elif t.tracked:
1565 if (
1547 if (
1566 size >= 0
1548 size >= 0
1567 and (
1549 and (
1568 (size != st.st_size and size != st.st_size & _rangemask)
1550 (size != st.st_size and size != st.st_size & _rangemask)
1569 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1551 or ((mode ^ st.st_mode) & 0o100 and checkexec)
1570 )
1552 )
1571 or t.from_p2
1553 or t.from_p2
1572 or fn in copymap
1554 or fn in copymap
1573 ):
1555 ):
1574 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1556 if stat.S_ISLNK(st.st_mode) and size != st.st_size:
1575 # issue6456: Size returned may be longer due to
1557 # issue6456: Size returned may be longer due to
1576 # encryption on EXT-4 fscrypt, undecided.
1558 # encryption on EXT-4 fscrypt, undecided.
1577 ladd(fn)
1559 ladd(fn)
1578 else:
1560 else:
1579 madd(fn)
1561 madd(fn)
1580 elif (
1562 elif (
1581 time != st[stat.ST_MTIME]
1563 time != st[stat.ST_MTIME]
1582 and time != st[stat.ST_MTIME] & _rangemask
1564 and time != st[stat.ST_MTIME] & _rangemask
1583 ):
1565 ):
1584 ladd(fn)
1566 ladd(fn)
1585 elif st[stat.ST_MTIME] == lastnormaltime:
1567 elif st[stat.ST_MTIME] == lastnormaltime:
1586 # fn may have just been marked as normal and it may have
1568 # fn may have just been marked as normal and it may have
1587 # changed in the same second without changing its size.
1569 # changed in the same second without changing its size.
1588 # This can happen if we quickly do multiple commits.
1570 # This can happen if we quickly do multiple commits.
1589 # Force lookup, so we don't miss such a racy file change.
1571 # Force lookup, so we don't miss such a racy file change.
1590 ladd(fn)
1572 ladd(fn)
1591 elif listclean:
1573 elif listclean:
1592 cadd(fn)
1574 cadd(fn)
1593 status = scmutil.status(
1575 status = scmutil.status(
1594 modified, added, removed, deleted, unknown, ignored, clean
1576 modified, added, removed, deleted, unknown, ignored, clean
1595 )
1577 )
1596 return (lookup, status)
1578 return (lookup, status)
1597
1579
1598 def matches(self, match):
1580 def matches(self, match):
1599 """
1581 """
1600 return files in the dirstate (in whatever state) filtered by match
1582 return files in the dirstate (in whatever state) filtered by match
1601 """
1583 """
1602 dmap = self._map
1584 dmap = self._map
1603 if rustmod is not None:
1585 if rustmod is not None:
1604 dmap = self._map._rustmap
1586 dmap = self._map._rustmap
1605
1587
1606 if match.always():
1588 if match.always():
1607 return dmap.keys()
1589 return dmap.keys()
1608 files = match.files()
1590 files = match.files()
1609 if match.isexact():
1591 if match.isexact():
1610 # fast path -- filter the other way around, since typically files is
1592 # fast path -- filter the other way around, since typically files is
1611 # much smaller than dmap
1593 # much smaller than dmap
1612 return [f for f in files if f in dmap]
1594 return [f for f in files if f in dmap]
1613 if match.prefix() and all(fn in dmap for fn in files):
1595 if match.prefix() and all(fn in dmap for fn in files):
1614 # fast path -- all the values are known to be files, so just return
1596 # fast path -- all the values are known to be files, so just return
1615 # that
1597 # that
1616 return list(files)
1598 return list(files)
1617 return [f for f in dmap if match(f)]
1599 return [f for f in dmap if match(f)]
1618
1600
1619 def _actualfilename(self, tr):
1601 def _actualfilename(self, tr):
1620 if tr:
1602 if tr:
1621 return self._pendingfilename
1603 return self._pendingfilename
1622 else:
1604 else:
1623 return self._filename
1605 return self._filename
1624
1606
1625 def savebackup(self, tr, backupname):
1607 def savebackup(self, tr, backupname):
1626 '''Save current dirstate into backup file'''
1608 '''Save current dirstate into backup file'''
1627 filename = self._actualfilename(tr)
1609 filename = self._actualfilename(tr)
1628 assert backupname != filename
1610 assert backupname != filename
1629
1611
1630 # use '_writedirstate' instead of 'write' to write changes certainly,
1612 # use '_writedirstate' instead of 'write' to write changes certainly,
1631 # because the latter omits writing out if transaction is running.
1613 # because the latter omits writing out if transaction is running.
1632 # output file will be used to create backup of dirstate at this point.
1614 # output file will be used to create backup of dirstate at this point.
1633 if self._dirty or not self._opener.exists(filename):
1615 if self._dirty or not self._opener.exists(filename):
1634 self._writedirstate(
1616 self._writedirstate(
1635 tr,
1617 tr,
1636 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1618 self._opener(filename, b"w", atomictemp=True, checkambig=True),
1637 )
1619 )
1638
1620
1639 if tr:
1621 if tr:
1640 # ensure that subsequent tr.writepending returns True for
1622 # ensure that subsequent tr.writepending returns True for
1641 # changes written out above, even if dirstate is never
1623 # changes written out above, even if dirstate is never
1642 # changed after this
1624 # changed after this
1643 tr.addfilegenerator(
1625 tr.addfilegenerator(
1644 b'dirstate',
1626 b'dirstate',
1645 (self._filename,),
1627 (self._filename,),
1646 lambda f: self._writedirstate(tr, f),
1628 lambda f: self._writedirstate(tr, f),
1647 location=b'plain',
1629 location=b'plain',
1648 )
1630 )
1649
1631
1650 # ensure that pending file written above is unlinked at
1632 # ensure that pending file written above is unlinked at
1651 # failure, even if tr.writepending isn't invoked until the
1633 # failure, even if tr.writepending isn't invoked until the
1652 # end of this transaction
1634 # end of this transaction
1653 tr.registertmp(filename, location=b'plain')
1635 tr.registertmp(filename, location=b'plain')
1654
1636
1655 self._opener.tryunlink(backupname)
1637 self._opener.tryunlink(backupname)
1656 # hardlink backup is okay because _writedirstate is always called
1638 # hardlink backup is okay because _writedirstate is always called
1657 # with an "atomictemp=True" file.
1639 # with an "atomictemp=True" file.
1658 util.copyfile(
1640 util.copyfile(
1659 self._opener.join(filename),
1641 self._opener.join(filename),
1660 self._opener.join(backupname),
1642 self._opener.join(backupname),
1661 hardlink=True,
1643 hardlink=True,
1662 )
1644 )
1663
1645
1664 def restorebackup(self, tr, backupname):
1646 def restorebackup(self, tr, backupname):
1665 '''Restore dirstate by backup file'''
1647 '''Restore dirstate by backup file'''
1666 # this "invalidate()" prevents "wlock.release()" from writing
1648 # this "invalidate()" prevents "wlock.release()" from writing
1667 # changes of dirstate out after restoring from backup file
1649 # changes of dirstate out after restoring from backup file
1668 self.invalidate()
1650 self.invalidate()
1669 filename = self._actualfilename(tr)
1651 filename = self._actualfilename(tr)
1670 o = self._opener
1652 o = self._opener
1671 if util.samefile(o.join(backupname), o.join(filename)):
1653 if util.samefile(o.join(backupname), o.join(filename)):
1672 o.unlink(backupname)
1654 o.unlink(backupname)
1673 else:
1655 else:
1674 o.rename(backupname, filename, checkambig=True)
1656 o.rename(backupname, filename, checkambig=True)
1675
1657
1676 def clearbackup(self, tr, backupname):
1658 def clearbackup(self, tr, backupname):
1677 '''Clear backup file'''
1659 '''Clear backup file'''
1678 self._opener.unlink(backupname)
1660 self._opener.unlink(backupname)
@@ -1,226 +1,223 b''
1 from __future__ import absolute_import, print_function
1 from __future__ import absolute_import, print_function
2
2
3 import contextlib
3 import contextlib
4
4
5 from . import util as interfaceutil
5 from . import util as interfaceutil
6
6
7
7
8 class idirstate(interfaceutil.Interface):
8 class idirstate(interfaceutil.Interface):
9 def __init__(
9 def __init__(
10 opener,
10 opener,
11 ui,
11 ui,
12 root,
12 root,
13 validate,
13 validate,
14 sparsematchfn,
14 sparsematchfn,
15 nodeconstants,
15 nodeconstants,
16 use_dirstate_v2,
16 use_dirstate_v2,
17 ):
17 ):
18 """Create a new dirstate object.
18 """Create a new dirstate object.
19
19
20 opener is an open()-like callable that can be used to open the
20 opener is an open()-like callable that can be used to open the
21 dirstate file; root is the root of the directory tracked by
21 dirstate file; root is the root of the directory tracked by
22 the dirstate.
22 the dirstate.
23 """
23 """
24
24
25 # TODO: all these private methods and attributes should be made
25 # TODO: all these private methods and attributes should be made
26 # public or removed from the interface.
26 # public or removed from the interface.
27 _ignore = interfaceutil.Attribute("""Matcher for ignored files.""")
27 _ignore = interfaceutil.Attribute("""Matcher for ignored files.""")
28
28
29 def _ignorefiles():
29 def _ignorefiles():
30 """Return a list of files containing patterns to ignore."""
30 """Return a list of files containing patterns to ignore."""
31
31
32 def _ignorefileandline(f):
32 def _ignorefileandline(f):
33 """Given a file `f`, return the ignore file and line that ignores it."""
33 """Given a file `f`, return the ignore file and line that ignores it."""
34
34
35 _checklink = interfaceutil.Attribute("""Callable for checking symlinks.""")
35 _checklink = interfaceutil.Attribute("""Callable for checking symlinks.""")
36 _checkexec = interfaceutil.Attribute("""Callable for checking exec bits.""")
36 _checkexec = interfaceutil.Attribute("""Callable for checking exec bits.""")
37
37
38 @contextlib.contextmanager
38 @contextlib.contextmanager
39 def parentchange():
39 def parentchange():
40 """Context manager for handling dirstate parents.
40 """Context manager for handling dirstate parents.
41
41
42 If an exception occurs in the scope of the context manager,
42 If an exception occurs in the scope of the context manager,
43 the incoherent dirstate won't be written when wlock is
43 the incoherent dirstate won't be written when wlock is
44 released.
44 released.
45 """
45 """
46
46
47 def pendingparentchange():
47 def pendingparentchange():
48 """Returns true if the dirstate is in the middle of a set of changes
48 """Returns true if the dirstate is in the middle of a set of changes
49 that modify the dirstate parent.
49 that modify the dirstate parent.
50 """
50 """
51
51
52 def hasdir(d):
52 def hasdir(d):
53 pass
53 pass
54
54
55 def flagfunc(buildfallback):
55 def flagfunc(buildfallback):
56 pass
56 pass
57
57
58 def getcwd():
58 def getcwd():
59 """Return the path from which a canonical path is calculated.
59 """Return the path from which a canonical path is calculated.
60
60
61 This path should be used to resolve file patterns or to convert
61 This path should be used to resolve file patterns or to convert
62 canonical paths back to file paths for display. It shouldn't be
62 canonical paths back to file paths for display. It shouldn't be
63 used to get real file paths. Use vfs functions instead.
63 used to get real file paths. Use vfs functions instead.
64 """
64 """
65
65
66 def pathto(f, cwd=None):
66 def pathto(f, cwd=None):
67 pass
67 pass
68
68
69 def __getitem__(key):
69 def __getitem__(key):
70 """Return the current state of key (a filename) in the dirstate.
70 """Return the current state of key (a filename) in the dirstate.
71
71
72 States are:
72 States are:
73 n normal
73 n normal
74 m needs merging
74 m needs merging
75 r marked for removal
75 r marked for removal
76 a marked for addition
76 a marked for addition
77 ? not tracked
77 ? not tracked
78 """
78 """
79
79
80 def __contains__(key):
80 def __contains__(key):
81 """Check if bytestring `key` is known to the dirstate."""
81 """Check if bytestring `key` is known to the dirstate."""
82
82
83 def __iter__():
83 def __iter__():
84 """Iterate the dirstate's contained filenames as bytestrings."""
84 """Iterate the dirstate's contained filenames as bytestrings."""
85
85
86 def items():
86 def items():
87 """Iterate the dirstate's entries as (filename, DirstateItem.
87 """Iterate the dirstate's entries as (filename, DirstateItem.
88
88
89 As usual, filename is a bytestring.
89 As usual, filename is a bytestring.
90 """
90 """
91
91
92 iteritems = items
92 iteritems = items
93
93
94 def parents():
94 def parents():
95 pass
95 pass
96
96
97 def p1():
97 def p1():
98 pass
98 pass
99
99
100 def p2():
100 def p2():
101 pass
101 pass
102
102
103 def branch():
103 def branch():
104 pass
104 pass
105
105
106 def setparents(p1, p2=None):
106 def setparents(p1, p2=None):
107 """Set dirstate parents to p1 and p2.
107 """Set dirstate parents to p1 and p2.
108
108
109 When moving from two parents to one, 'm' merged entries a
109 When moving from two parents to one, 'm' merged entries a
110 adjusted to normal and previous copy records discarded and
110 adjusted to normal and previous copy records discarded and
111 returned by the call.
111 returned by the call.
112
112
113 See localrepo.setparents()
113 See localrepo.setparents()
114 """
114 """
115
115
116 def setbranch(branch):
116 def setbranch(branch):
117 pass
117 pass
118
118
119 def invalidate():
119 def invalidate():
120 """Causes the next access to reread the dirstate.
120 """Causes the next access to reread the dirstate.
121
121
122 This is different from localrepo.invalidatedirstate() because it always
122 This is different from localrepo.invalidatedirstate() because it always
123 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
123 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
124 check whether the dirstate has changed before rereading it."""
124 check whether the dirstate has changed before rereading it."""
125
125
126 def copy(source, dest):
126 def copy(source, dest):
127 """Mark dest as a copy of source. Unmark dest if source is None."""
127 """Mark dest as a copy of source. Unmark dest if source is None."""
128
128
129 def copied(file):
129 def copied(file):
130 pass
130 pass
131
131
132 def copies():
132 def copies():
133 pass
133 pass
134
134
135 def merge(f):
136 '''Mark a file merged.'''
137
138 def drop(f):
135 def drop(f):
139 '''Drop a file from the dirstate'''
136 '''Drop a file from the dirstate'''
140
137
141 def normalize(path, isknown=False, ignoremissing=False):
138 def normalize(path, isknown=False, ignoremissing=False):
142 """
139 """
143 normalize the case of a pathname when on a casefolding filesystem
140 normalize the case of a pathname when on a casefolding filesystem
144
141
145 isknown specifies whether the filename came from walking the
142 isknown specifies whether the filename came from walking the
146 disk, to avoid extra filesystem access.
143 disk, to avoid extra filesystem access.
147
144
148 If ignoremissing is True, missing path are returned
145 If ignoremissing is True, missing path are returned
149 unchanged. Otherwise, we try harder to normalize possibly
146 unchanged. Otherwise, we try harder to normalize possibly
150 existing path components.
147 existing path components.
151
148
152 The normalized case is determined based on the following precedence:
149 The normalized case is determined based on the following precedence:
153
150
154 - version of name already stored in the dirstate
151 - version of name already stored in the dirstate
155 - version of name stored on disk
152 - version of name stored on disk
156 - version provided via command arguments
153 - version provided via command arguments
157 """
154 """
158
155
159 def clear():
156 def clear():
160 pass
157 pass
161
158
162 def rebuild(parent, allfiles, changedfiles=None):
159 def rebuild(parent, allfiles, changedfiles=None):
163 pass
160 pass
164
161
165 def identity():
162 def identity():
166 """Return identity of dirstate it to detect changing in storage
163 """Return identity of dirstate it to detect changing in storage
167
164
168 If identity of previous dirstate is equal to this, writing
165 If identity of previous dirstate is equal to this, writing
169 changes based on the former dirstate out can keep consistency.
166 changes based on the former dirstate out can keep consistency.
170 """
167 """
171
168
172 def write(tr):
169 def write(tr):
173 pass
170 pass
174
171
175 def addparentchangecallback(category, callback):
172 def addparentchangecallback(category, callback):
176 """add a callback to be called when the wd parents are changed
173 """add a callback to be called when the wd parents are changed
177
174
178 Callback will be called with the following arguments:
175 Callback will be called with the following arguments:
179 dirstate, (oldp1, oldp2), (newp1, newp2)
176 dirstate, (oldp1, oldp2), (newp1, newp2)
180
177
181 Category is a unique identifier to allow overwriting an old callback
178 Category is a unique identifier to allow overwriting an old callback
182 with a newer callback.
179 with a newer callback.
183 """
180 """
184
181
185 def walk(match, subrepos, unknown, ignored, full=True):
182 def walk(match, subrepos, unknown, ignored, full=True):
186 """
183 """
187 Walk recursively through the directory tree, finding all files
184 Walk recursively through the directory tree, finding all files
188 matched by match.
185 matched by match.
189
186
190 If full is False, maybe skip some known-clean files.
187 If full is False, maybe skip some known-clean files.
191
188
192 Return a dict mapping filename to stat-like object (either
189 Return a dict mapping filename to stat-like object (either
193 mercurial.osutil.stat instance or return value of os.stat()).
190 mercurial.osutil.stat instance or return value of os.stat()).
194
191
195 """
192 """
196
193
197 def status(match, subrepos, ignored, clean, unknown):
194 def status(match, subrepos, ignored, clean, unknown):
198 """Determine the status of the working copy relative to the
195 """Determine the status of the working copy relative to the
199 dirstate and return a pair of (unsure, status), where status is of type
196 dirstate and return a pair of (unsure, status), where status is of type
200 scmutil.status and:
197 scmutil.status and:
201
198
202 unsure:
199 unsure:
203 files that might have been modified since the dirstate was
200 files that might have been modified since the dirstate was
204 written, but need to be read to be sure (size is the same
201 written, but need to be read to be sure (size is the same
205 but mtime differs)
202 but mtime differs)
206 status.modified:
203 status.modified:
207 files that have definitely been modified since the dirstate
204 files that have definitely been modified since the dirstate
208 was written (different size or mode)
205 was written (different size or mode)
209 status.clean:
206 status.clean:
210 files that have definitely not been modified since the
207 files that have definitely not been modified since the
211 dirstate was written
208 dirstate was written
212 """
209 """
213
210
214 def matches(match):
211 def matches(match):
215 """
212 """
216 return files in the dirstate (in whatever state) filtered by match
213 return files in the dirstate (in whatever state) filtered by match
217 """
214 """
218
215
219 def savebackup(tr, backupname):
216 def savebackup(tr, backupname):
220 '''Save current dirstate into backup file'''
217 '''Save current dirstate into backup file'''
221
218
222 def restorebackup(tr, backupname):
219 def restorebackup(tr, backupname):
223 '''Restore dirstate by backup file'''
220 '''Restore dirstate by backup file'''
224
221
225 def clearbackup(tr, backupname):
222 def clearbackup(tr, backupname):
226 '''Clear backup file'''
223 '''Clear backup file'''
General Comments 0
You need to be logged in to leave comments. Login now