##// END OF EJS Templates
sparse: directly inline the `walk` wrapping...
marmoute -
r50251:a87443d4 default
parent child Browse files
Show More
@@ -1,461 +1,448 b''
1 # sparse.py - allow sparse checkouts of the working directory
1 # sparse.py - allow sparse checkouts of the working directory
2 #
2 #
3 # Copyright 2014 Facebook, Inc.
3 # Copyright 2014 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """allow sparse checkouts of the working directory (EXPERIMENTAL)
8 """allow sparse checkouts of the working directory (EXPERIMENTAL)
9
9
10 (This extension is not yet protected by backwards compatibility
10 (This extension is not yet protected by backwards compatibility
11 guarantees. Any aspect may break in future releases until this
11 guarantees. Any aspect may break in future releases until this
12 notice is removed.)
12 notice is removed.)
13
13
14 This extension allows the working directory to only consist of a
14 This extension allows the working directory to only consist of a
15 subset of files for the revision. This allows specific files or
15 subset of files for the revision. This allows specific files or
16 directories to be explicitly included or excluded. Many repository
16 directories to be explicitly included or excluded. Many repository
17 operations have performance proportional to the number of files in
17 operations have performance proportional to the number of files in
18 the working directory. So only realizing a subset of files in the
18 the working directory. So only realizing a subset of files in the
19 working directory can improve performance.
19 working directory can improve performance.
20
20
21 Sparse Config Files
21 Sparse Config Files
22 -------------------
22 -------------------
23
23
24 The set of files that are part of a sparse checkout are defined by
24 The set of files that are part of a sparse checkout are defined by
25 a sparse config file. The file defines 3 things: includes (files to
25 a sparse config file. The file defines 3 things: includes (files to
26 include in the sparse checkout), excludes (files to exclude from the
26 include in the sparse checkout), excludes (files to exclude from the
27 sparse checkout), and profiles (links to other config files).
27 sparse checkout), and profiles (links to other config files).
28
28
29 The file format is newline delimited. Empty lines and lines beginning
29 The file format is newline delimited. Empty lines and lines beginning
30 with ``#`` are ignored.
30 with ``#`` are ignored.
31
31
32 Lines beginning with ``%include `` denote another sparse config file
32 Lines beginning with ``%include `` denote another sparse config file
33 to include. e.g. ``%include tests.sparse``. The filename is relative
33 to include. e.g. ``%include tests.sparse``. The filename is relative
34 to the repository root.
34 to the repository root.
35
35
36 The special lines ``[include]`` and ``[exclude]`` denote the section
36 The special lines ``[include]`` and ``[exclude]`` denote the section
37 for includes and excludes that follow, respectively. It is illegal to
37 for includes and excludes that follow, respectively. It is illegal to
38 have ``[include]`` after ``[exclude]``.
38 have ``[include]`` after ``[exclude]``.
39
39
40 Non-special lines resemble file patterns to be added to either includes
40 Non-special lines resemble file patterns to be added to either includes
41 or excludes. The syntax of these lines is documented by :hg:`help patterns`.
41 or excludes. The syntax of these lines is documented by :hg:`help patterns`.
42 Patterns are interpreted as ``glob:`` by default and match against the
42 Patterns are interpreted as ``glob:`` by default and match against the
43 root of the repository.
43 root of the repository.
44
44
45 Exclusion patterns take precedence over inclusion patterns. So even
45 Exclusion patterns take precedence over inclusion patterns. So even
46 if a file is explicitly included, an ``[exclude]`` entry can remove it.
46 if a file is explicitly included, an ``[exclude]`` entry can remove it.
47
47
48 For example, say you have a repository with 3 directories, ``frontend/``,
48 For example, say you have a repository with 3 directories, ``frontend/``,
49 ``backend/``, and ``tools/``. ``frontend/`` and ``backend/`` correspond
49 ``backend/``, and ``tools/``. ``frontend/`` and ``backend/`` correspond
50 to different projects and it is uncommon for someone working on one
50 to different projects and it is uncommon for someone working on one
51 to need the files for the other. But ``tools/`` contains files shared
51 to need the files for the other. But ``tools/`` contains files shared
52 between both projects. Your sparse config files may resemble::
52 between both projects. Your sparse config files may resemble::
53
53
54 # frontend.sparse
54 # frontend.sparse
55 frontend/**
55 frontend/**
56 tools/**
56 tools/**
57
57
58 # backend.sparse
58 # backend.sparse
59 backend/**
59 backend/**
60 tools/**
60 tools/**
61
61
62 Say the backend grows in size. Or there's a directory with thousands
62 Say the backend grows in size. Or there's a directory with thousands
63 of files you wish to exclude. You can modify the profile to exclude
63 of files you wish to exclude. You can modify the profile to exclude
64 certain files::
64 certain files::
65
65
66 [include]
66 [include]
67 backend/**
67 backend/**
68 tools/**
68 tools/**
69
69
70 [exclude]
70 [exclude]
71 tools/tests/**
71 tools/tests/**
72 """
72 """
73
73
74
74
75 from mercurial.i18n import _
75 from mercurial.i18n import _
76 from mercurial.pycompat import setattr
76 from mercurial.pycompat import setattr
77 from mercurial import (
77 from mercurial import (
78 cmdutil,
78 cmdutil,
79 commands,
79 commands,
80 dirstate,
80 dirstate,
81 error,
81 error,
82 extensions,
82 extensions,
83 logcmdutil,
83 logcmdutil,
84 match as matchmod,
85 merge as mergemod,
84 merge as mergemod,
86 pycompat,
85 pycompat,
87 registrar,
86 registrar,
88 sparse,
87 sparse,
89 util,
88 util,
90 )
89 )
91
90
92 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
91 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
93 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
92 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
94 # be specifying the version(s) of Mercurial they are tested with, or
93 # be specifying the version(s) of Mercurial they are tested with, or
95 # leave the attribute unspecified.
94 # leave the attribute unspecified.
96 testedwith = b'ships-with-hg-core'
95 testedwith = b'ships-with-hg-core'
97
96
98 cmdtable = {}
97 cmdtable = {}
99 command = registrar.command(cmdtable)
98 command = registrar.command(cmdtable)
100
99
101
100
102 def extsetup(ui):
101 def extsetup(ui):
103 sparse.enabled = True
102 sparse.enabled = True
104
103
105 _setupclone(ui)
104 _setupclone(ui)
106 _setuplog(ui)
105 _setuplog(ui)
107 _setupadd(ui)
106 _setupadd(ui)
108 _setupdirstate(ui)
107 _setupdirstate(ui)
109
108
110
109
111 def replacefilecache(cls, propname, replacement):
110 def replacefilecache(cls, propname, replacement):
112 """Replace a filecache property with a new class. This allows changing the
111 """Replace a filecache property with a new class. This allows changing the
113 cache invalidation condition."""
112 cache invalidation condition."""
114 origcls = cls
113 origcls = cls
115 assert callable(replacement)
114 assert callable(replacement)
116 while cls is not object:
115 while cls is not object:
117 if propname in cls.__dict__:
116 if propname in cls.__dict__:
118 orig = cls.__dict__[propname]
117 orig = cls.__dict__[propname]
119 setattr(cls, propname, replacement(orig))
118 setattr(cls, propname, replacement(orig))
120 break
119 break
121 cls = cls.__bases__[0]
120 cls = cls.__bases__[0]
122
121
123 if cls is object:
122 if cls is object:
124 raise AttributeError(
123 raise AttributeError(
125 _(b"type '%s' has no property '%s'") % (origcls, propname)
124 _(b"type '%s' has no property '%s'") % (origcls, propname)
126 )
125 )
127
126
128
127
129 def _setuplog(ui):
128 def _setuplog(ui):
130 entry = commands.table[b'log|history']
129 entry = commands.table[b'log|history']
131 entry[1].append(
130 entry[1].append(
132 (
131 (
133 b'',
132 b'',
134 b'sparse',
133 b'sparse',
135 None,
134 None,
136 b"limit to changesets affecting the sparse checkout",
135 b"limit to changesets affecting the sparse checkout",
137 )
136 )
138 )
137 )
139
138
140 def _initialrevs(orig, repo, wopts):
139 def _initialrevs(orig, repo, wopts):
141 revs = orig(repo, wopts)
140 revs = orig(repo, wopts)
142 if wopts.opts.get(b'sparse'):
141 if wopts.opts.get(b'sparse'):
143 sparsematch = sparse.matcher(repo)
142 sparsematch = sparse.matcher(repo)
144
143
145 def ctxmatch(rev):
144 def ctxmatch(rev):
146 ctx = repo[rev]
145 ctx = repo[rev]
147 return any(f for f in ctx.files() if sparsematch(f))
146 return any(f for f in ctx.files() if sparsematch(f))
148
147
149 revs = revs.filter(ctxmatch)
148 revs = revs.filter(ctxmatch)
150 return revs
149 return revs
151
150
152 extensions.wrapfunction(logcmdutil, b'_initialrevs', _initialrevs)
151 extensions.wrapfunction(logcmdutil, b'_initialrevs', _initialrevs)
153
152
154
153
155 def _clonesparsecmd(orig, ui, repo, *args, **opts):
154 def _clonesparsecmd(orig, ui, repo, *args, **opts):
156 include = opts.get('include')
155 include = opts.get('include')
157 exclude = opts.get('exclude')
156 exclude = opts.get('exclude')
158 enableprofile = opts.get('enable_profile')
157 enableprofile = opts.get('enable_profile')
159 narrow_pat = opts.get('narrow')
158 narrow_pat = opts.get('narrow')
160
159
161 # if --narrow is passed, it means they are includes and excludes for narrow
160 # if --narrow is passed, it means they are includes and excludes for narrow
162 # clone
161 # clone
163 if not narrow_pat and (include or exclude or enableprofile):
162 if not narrow_pat and (include or exclude or enableprofile):
164
163
165 def clonesparse(orig, ctx, *args, **kwargs):
164 def clonesparse(orig, ctx, *args, **kwargs):
166 sparse.updateconfig(
165 sparse.updateconfig(
167 ctx.repo().unfiltered(),
166 ctx.repo().unfiltered(),
168 {},
167 {},
169 include=include,
168 include=include,
170 exclude=exclude,
169 exclude=exclude,
171 enableprofile=enableprofile,
170 enableprofile=enableprofile,
172 usereporootpaths=True,
171 usereporootpaths=True,
173 )
172 )
174 return orig(ctx, *args, **kwargs)
173 return orig(ctx, *args, **kwargs)
175
174
176 extensions.wrapfunction(mergemod, b'update', clonesparse)
175 extensions.wrapfunction(mergemod, b'update', clonesparse)
177 return orig(ui, repo, *args, **opts)
176 return orig(ui, repo, *args, **opts)
178
177
179
178
180 def _setupclone(ui):
179 def _setupclone(ui):
181 entry = commands.table[b'clone']
180 entry = commands.table[b'clone']
182 entry[1].append((b'', b'enable-profile', [], b'enable a sparse profile'))
181 entry[1].append((b'', b'enable-profile', [], b'enable a sparse profile'))
183 entry[1].append((b'', b'include', [], b'include sparse pattern'))
182 entry[1].append((b'', b'include', [], b'include sparse pattern'))
184 entry[1].append((b'', b'exclude', [], b'exclude sparse pattern'))
183 entry[1].append((b'', b'exclude', [], b'exclude sparse pattern'))
185 extensions.wrapcommand(commands.table, b'clone', _clonesparsecmd)
184 extensions.wrapcommand(commands.table, b'clone', _clonesparsecmd)
186
185
187
186
188 def _setupadd(ui):
187 def _setupadd(ui):
189 entry = commands.table[b'add']
188 entry = commands.table[b'add']
190 entry[1].append(
189 entry[1].append(
191 (
190 (
192 b's',
191 b's',
193 b'sparse',
192 b'sparse',
194 None,
193 None,
195 b'also include directories of added files in sparse config',
194 b'also include directories of added files in sparse config',
196 )
195 )
197 )
196 )
198
197
199 def _add(orig, ui, repo, *pats, **opts):
198 def _add(orig, ui, repo, *pats, **opts):
200 if opts.get('sparse'):
199 if opts.get('sparse'):
201 dirs = set()
200 dirs = set()
202 for pat in pats:
201 for pat in pats:
203 dirname, basename = util.split(pat)
202 dirname, basename = util.split(pat)
204 dirs.add(dirname)
203 dirs.add(dirname)
205 sparse.updateconfig(repo, opts, include=list(dirs))
204 sparse.updateconfig(repo, opts, include=list(dirs))
206 return orig(ui, repo, *pats, **opts)
205 return orig(ui, repo, *pats, **opts)
207
206
208 extensions.wrapcommand(commands.table, b'add', _add)
207 extensions.wrapcommand(commands.table, b'add', _add)
209
208
210
209
211 def _setupdirstate(ui):
210 def _setupdirstate(ui):
212 """Modify the dirstate to prevent stat'ing excluded files,
211 """Modify the dirstate to prevent stat'ing excluded files,
213 and to prevent modifications to files outside the checkout.
212 and to prevent modifications to files outside the checkout.
214 """
213 """
215
214
216 def walk(orig, self, match, subrepos, unknown, ignored, full=True):
217 # hack to not exclude explicitly-specified paths so that they can
218 # be warned later on e.g. dirstate.add()
219 sparse_matcher = self._sparsematcher
220 if sparse_matcher is not None:
221 em = matchmod.exact(match.files())
222 sm = matchmod.unionmatcher([self._sparsematcher, em])
223 match = matchmod.intersectmatchers(match, sm)
224 return orig(self, match, subrepos, unknown, ignored, full)
225
226 extensions.wrapfunction(dirstate.dirstate, b'walk', walk)
227
228 # dirstate.rebuild should not add non-matching files
215 # dirstate.rebuild should not add non-matching files
229 def _rebuild(orig, self, parent, allfiles, changedfiles=None):
216 def _rebuild(orig, self, parent, allfiles, changedfiles=None):
230 matcher = self._sparsematcher
217 matcher = self._sparsematcher
231 if matcher is not None and not matcher.always():
218 if matcher is not None and not matcher.always():
232 allfiles = [f for f in allfiles if matcher(f)]
219 allfiles = [f for f in allfiles if matcher(f)]
233 if changedfiles:
220 if changedfiles:
234 changedfiles = [f for f in changedfiles if matcher(f)]
221 changedfiles = [f for f in changedfiles if matcher(f)]
235
222
236 if changedfiles is not None:
223 if changedfiles is not None:
237 # In _rebuild, these files will be deleted from the dirstate
224 # In _rebuild, these files will be deleted from the dirstate
238 # when they are not found to be in allfiles
225 # when they are not found to be in allfiles
239 dirstatefilestoremove = {f for f in self if not matcher(f)}
226 dirstatefilestoremove = {f for f in self if not matcher(f)}
240 changedfiles = dirstatefilestoremove.union(changedfiles)
227 changedfiles = dirstatefilestoremove.union(changedfiles)
241
228
242 return orig(self, parent, allfiles, changedfiles)
229 return orig(self, parent, allfiles, changedfiles)
243
230
244 extensions.wrapfunction(dirstate.dirstate, b'rebuild', _rebuild)
231 extensions.wrapfunction(dirstate.dirstate, b'rebuild', _rebuild)
245
232
246 # Prevent adding files that are outside the sparse checkout
233 # Prevent adding files that are outside the sparse checkout
247 editfuncs = [
234 editfuncs = [
248 b'set_tracked',
235 b'set_tracked',
249 b'set_untracked',
236 b'set_untracked',
250 b'copy',
237 b'copy',
251 ]
238 ]
252 hint = _(
239 hint = _(
253 b'include file with `hg debugsparse --include <pattern>` or use '
240 b'include file with `hg debugsparse --include <pattern>` or use '
254 + b'`hg add -s <file>` to include file directory while adding'
241 + b'`hg add -s <file>` to include file directory while adding'
255 )
242 )
256 for func in editfuncs:
243 for func in editfuncs:
257
244
258 def _wrapper(orig, self, *args, **kwargs):
245 def _wrapper(orig, self, *args, **kwargs):
259 sparsematch = self._sparsematcher
246 sparsematch = self._sparsematcher
260 if sparsematch is not None and not sparsematch.always():
247 if sparsematch is not None and not sparsematch.always():
261 for f in args:
248 for f in args:
262 if f is not None and not sparsematch(f) and f not in self:
249 if f is not None and not sparsematch(f) and f not in self:
263 raise error.Abort(
250 raise error.Abort(
264 _(
251 _(
265 b"cannot add '%s' - it is outside "
252 b"cannot add '%s' - it is outside "
266 b"the sparse checkout"
253 b"the sparse checkout"
267 )
254 )
268 % f,
255 % f,
269 hint=hint,
256 hint=hint,
270 )
257 )
271 return orig(self, *args, **kwargs)
258 return orig(self, *args, **kwargs)
272
259
273 extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
260 extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
274
261
275
262
276 @command(
263 @command(
277 b'debugsparse',
264 b'debugsparse',
278 [
265 [
279 (
266 (
280 b'I',
267 b'I',
281 b'include',
268 b'include',
282 [],
269 [],
283 _(b'include files in the sparse checkout'),
270 _(b'include files in the sparse checkout'),
284 _(b'PATTERN'),
271 _(b'PATTERN'),
285 ),
272 ),
286 (
273 (
287 b'X',
274 b'X',
288 b'exclude',
275 b'exclude',
289 [],
276 [],
290 _(b'exclude files in the sparse checkout'),
277 _(b'exclude files in the sparse checkout'),
291 _(b'PATTERN'),
278 _(b'PATTERN'),
292 ),
279 ),
293 (
280 (
294 b'd',
281 b'd',
295 b'delete',
282 b'delete',
296 [],
283 [],
297 _(b'delete an include/exclude rule'),
284 _(b'delete an include/exclude rule'),
298 _(b'PATTERN'),
285 _(b'PATTERN'),
299 ),
286 ),
300 (
287 (
301 b'f',
288 b'f',
302 b'force',
289 b'force',
303 False,
290 False,
304 _(b'allow changing rules even with pending changes'),
291 _(b'allow changing rules even with pending changes'),
305 ),
292 ),
306 (
293 (
307 b'',
294 b'',
308 b'enable-profile',
295 b'enable-profile',
309 [],
296 [],
310 _(b'enables the specified profile'),
297 _(b'enables the specified profile'),
311 _(b'PATTERN'),
298 _(b'PATTERN'),
312 ),
299 ),
313 (
300 (
314 b'',
301 b'',
315 b'disable-profile',
302 b'disable-profile',
316 [],
303 [],
317 _(b'disables the specified profile'),
304 _(b'disables the specified profile'),
318 _(b'PATTERN'),
305 _(b'PATTERN'),
319 ),
306 ),
320 (
307 (
321 b'',
308 b'',
322 b'import-rules',
309 b'import-rules',
323 [],
310 [],
324 _(b'imports rules from a file'),
311 _(b'imports rules from a file'),
325 _(b'PATTERN'),
312 _(b'PATTERN'),
326 ),
313 ),
327 (b'', b'clear-rules', False, _(b'clears local include/exclude rules')),
314 (b'', b'clear-rules', False, _(b'clears local include/exclude rules')),
328 (
315 (
329 b'',
316 b'',
330 b'refresh',
317 b'refresh',
331 False,
318 False,
332 _(b'updates the working after sparseness changes'),
319 _(b'updates the working after sparseness changes'),
333 ),
320 ),
334 (b'', b'reset', False, _(b'makes the repo full again')),
321 (b'', b'reset', False, _(b'makes the repo full again')),
335 ]
322 ]
336 + commands.templateopts,
323 + commands.templateopts,
337 _(b'[--OPTION]'),
324 _(b'[--OPTION]'),
338 helpbasic=True,
325 helpbasic=True,
339 )
326 )
340 def debugsparse(ui, repo, **opts):
327 def debugsparse(ui, repo, **opts):
341 """make the current checkout sparse, or edit the existing checkout
328 """make the current checkout sparse, or edit the existing checkout
342
329
343 The sparse command is used to make the current checkout sparse.
330 The sparse command is used to make the current checkout sparse.
344 This means files that don't meet the sparse condition will not be
331 This means files that don't meet the sparse condition will not be
345 written to disk, or show up in any working copy operations. It does
332 written to disk, or show up in any working copy operations. It does
346 not affect files in history in any way.
333 not affect files in history in any way.
347
334
348 Passing no arguments prints the currently applied sparse rules.
335 Passing no arguments prints the currently applied sparse rules.
349
336
350 --include and --exclude are used to add and remove files from the sparse
337 --include and --exclude are used to add and remove files from the sparse
351 checkout. The effects of adding an include or exclude rule are applied
338 checkout. The effects of adding an include or exclude rule are applied
352 immediately. If applying the new rule would cause a file with pending
339 immediately. If applying the new rule would cause a file with pending
353 changes to be added or removed, the command will fail. Pass --force to
340 changes to be added or removed, the command will fail. Pass --force to
354 force a rule change even with pending changes (the changes on disk will
341 force a rule change even with pending changes (the changes on disk will
355 be preserved).
342 be preserved).
356
343
357 --delete removes an existing include/exclude rule. The effects are
344 --delete removes an existing include/exclude rule. The effects are
358 immediate.
345 immediate.
359
346
360 --refresh refreshes the files on disk based on the sparse rules. This is
347 --refresh refreshes the files on disk based on the sparse rules. This is
361 only necessary if .hg/sparse was changed by hand.
348 only necessary if .hg/sparse was changed by hand.
362
349
363 --enable-profile and --disable-profile accept a path to a .hgsparse file.
350 --enable-profile and --disable-profile accept a path to a .hgsparse file.
364 This allows defining sparse checkouts and tracking them inside the
351 This allows defining sparse checkouts and tracking them inside the
365 repository. This is useful for defining commonly used sparse checkouts for
352 repository. This is useful for defining commonly used sparse checkouts for
366 many people to use. As the profile definition changes over time, the sparse
353 many people to use. As the profile definition changes over time, the sparse
367 checkout will automatically be updated appropriately, depending on which
354 checkout will automatically be updated appropriately, depending on which
368 changeset is checked out. Changes to .hgsparse are not applied until they
355 changeset is checked out. Changes to .hgsparse are not applied until they
369 have been committed.
356 have been committed.
370
357
371 --import-rules accepts a path to a file containing rules in the .hgsparse
358 --import-rules accepts a path to a file containing rules in the .hgsparse
372 format, allowing you to add --include, --exclude and --enable-profile rules
359 format, allowing you to add --include, --exclude and --enable-profile rules
373 in bulk. Like the --include, --exclude and --enable-profile switches, the
360 in bulk. Like the --include, --exclude and --enable-profile switches, the
374 changes are applied immediately.
361 changes are applied immediately.
375
362
376 --clear-rules removes all local include and exclude rules, while leaving
363 --clear-rules removes all local include and exclude rules, while leaving
377 any enabled profiles in place.
364 any enabled profiles in place.
378
365
379 Returns 0 if editing the sparse checkout succeeds.
366 Returns 0 if editing the sparse checkout succeeds.
380 """
367 """
381 opts = pycompat.byteskwargs(opts)
368 opts = pycompat.byteskwargs(opts)
382 include = opts.get(b'include')
369 include = opts.get(b'include')
383 exclude = opts.get(b'exclude')
370 exclude = opts.get(b'exclude')
384 force = opts.get(b'force')
371 force = opts.get(b'force')
385 enableprofile = opts.get(b'enable_profile')
372 enableprofile = opts.get(b'enable_profile')
386 disableprofile = opts.get(b'disable_profile')
373 disableprofile = opts.get(b'disable_profile')
387 importrules = opts.get(b'import_rules')
374 importrules = opts.get(b'import_rules')
388 clearrules = opts.get(b'clear_rules')
375 clearrules = opts.get(b'clear_rules')
389 delete = opts.get(b'delete')
376 delete = opts.get(b'delete')
390 refresh = opts.get(b'refresh')
377 refresh = opts.get(b'refresh')
391 reset = opts.get(b'reset')
378 reset = opts.get(b'reset')
392 action = cmdutil.check_at_most_one_arg(
379 action = cmdutil.check_at_most_one_arg(
393 opts, b'import_rules', b'clear_rules', b'refresh'
380 opts, b'import_rules', b'clear_rules', b'refresh'
394 )
381 )
395 updateconfig = bool(
382 updateconfig = bool(
396 include or exclude or delete or reset or enableprofile or disableprofile
383 include or exclude or delete or reset or enableprofile or disableprofile
397 )
384 )
398 count = sum([updateconfig, bool(action)])
385 count = sum([updateconfig, bool(action)])
399 if count > 1:
386 if count > 1:
400 raise error.Abort(_(b"too many flags specified"))
387 raise error.Abort(_(b"too many flags specified"))
401
388
402 # enable sparse on repo even if the requirements is missing.
389 # enable sparse on repo even if the requirements is missing.
403 repo._has_sparse = True
390 repo._has_sparse = True
404
391
405 if count == 0:
392 if count == 0:
406 if repo.vfs.exists(b'sparse'):
393 if repo.vfs.exists(b'sparse'):
407 ui.status(repo.vfs.read(b"sparse") + b"\n")
394 ui.status(repo.vfs.read(b"sparse") + b"\n")
408 temporaryincludes = sparse.readtemporaryincludes(repo)
395 temporaryincludes = sparse.readtemporaryincludes(repo)
409 if temporaryincludes:
396 if temporaryincludes:
410 ui.status(
397 ui.status(
411 _(b"Temporarily Included Files (for merge/rebase):\n")
398 _(b"Temporarily Included Files (for merge/rebase):\n")
412 )
399 )
413 ui.status((b"\n".join(temporaryincludes) + b"\n"))
400 ui.status((b"\n".join(temporaryincludes) + b"\n"))
414 return
401 return
415 else:
402 else:
416 raise error.Abort(
403 raise error.Abort(
417 _(
404 _(
418 b'the debugsparse command is only supported on'
405 b'the debugsparse command is only supported on'
419 b' sparse repositories'
406 b' sparse repositories'
420 )
407 )
421 )
408 )
422
409
423 if updateconfig:
410 if updateconfig:
424 sparse.updateconfig(
411 sparse.updateconfig(
425 repo,
412 repo,
426 opts,
413 opts,
427 include=include,
414 include=include,
428 exclude=exclude,
415 exclude=exclude,
429 reset=reset,
416 reset=reset,
430 delete=delete,
417 delete=delete,
431 enableprofile=enableprofile,
418 enableprofile=enableprofile,
432 disableprofile=disableprofile,
419 disableprofile=disableprofile,
433 force=force,
420 force=force,
434 )
421 )
435
422
436 if importrules:
423 if importrules:
437 sparse.importfromfiles(repo, opts, importrules, force=force)
424 sparse.importfromfiles(repo, opts, importrules, force=force)
438
425
439 if clearrules:
426 if clearrules:
440 sparse.clearrules(repo, force=force)
427 sparse.clearrules(repo, force=force)
441
428
442 if refresh:
429 if refresh:
443 try:
430 try:
444 wlock = repo.wlock()
431 wlock = repo.wlock()
445 fcounts = map(
432 fcounts = map(
446 len,
433 len,
447 sparse.refreshwdir(
434 sparse.refreshwdir(
448 repo, repo.status(), sparse.matcher(repo), force=force
435 repo, repo.status(), sparse.matcher(repo), force=force
449 ),
436 ),
450 )
437 )
451 sparse.printchanges(
438 sparse.printchanges(
452 ui,
439 ui,
453 opts,
440 opts,
454 added=fcounts[0],
441 added=fcounts[0],
455 dropped=fcounts[1],
442 dropped=fcounts[1],
456 conflicting=fcounts[2],
443 conflicting=fcounts[2],
457 )
444 )
458 finally:
445 finally:
459 wlock.release()
446 wlock.release()
460
447
461 del repo._has_sparse
448 del repo._has_sparse
@@ -1,1474 +1,1479 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
42 parsers = policy.importmod('parsers')
42 parsers = policy.importmod('parsers')
43 rustmod = policy.importrust('dirstate')
43 rustmod = policy.importrust('dirstate')
44
44
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46
46
47 propertycache = util.propertycache
47 propertycache = util.propertycache
48 filecache = scmutil.filecache
48 filecache = scmutil.filecache
49 _rangemask = dirstatemap.rangemask
49 _rangemask = dirstatemap.rangemask
50
50
51 DirstateItem = dirstatemap.DirstateItem
51 DirstateItem = dirstatemap.DirstateItem
52
52
53
53
class repocache(filecache):
    """filecache for files in .hg/"""

    def join(self, obj, fname):
        # resolve fname relative to the repository's .hg/ opener
        return obj._opener.join(fname)
59
59
60
60
class rootcache(filecache):
    """filecache for files in the repository root"""

    def join(self, obj, fname):
        # resolve fname relative to the working-directory root
        return obj._join(fname)
66
66
67
67
def requires_parents_change(func):
    """Decorator enforcing that *func* runs inside a parentchange context.

    Calling the wrapped method while no parent change is pending raises
    ``error.ProgrammingError``.
    """

    def inner(self, *args, **kwargs):
        if self.pendingparentchange():
            return func(self, *args, **kwargs)
        msg = 'calling `%s` outside of a parentchange context'
        msg %= func.__name__
        raise error.ProgrammingError(msg)

    return inner
77
77
78
78
def requires_no_parents_change(func):
    """Decorator enforcing that *func* runs outside any parentchange context.

    Calling the wrapped method while a parent change is pending raises
    ``error.ProgrammingError``.
    """

    def inner(self, *args, **kwargs):
        if not self.pendingparentchange():
            return func(self, *args, **kwargs)
        msg = 'calling `%s` inside of a parentchange context'
        msg %= func.__name__
        raise error.ProgrammingError(msg)

    return inner
88
88
89
89
90 @interfaceutil.implementer(intdirstate.idirstate)
90 @interfaceutil.implementer(intdirstate.idirstate)
91 class dirstate:
91 class dirstate:
    def __init__(
        self,
        opener,
        ui,
        root,
        validate,
        sparsematchfn,
        nodeconstants,
        use_dirstate_v2,
        use_tracked_hint=False,
    ):
        """Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        """
        self._use_dirstate_v2 = use_dirstate_v2
        self._use_tracked_hint = use_tracked_hint
        self._nodeconstants = nodeconstants
        self._opener = opener
        self._validate = validate
        self._root = root
        # Either build a sparse-matcher or None if sparse is disabled
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True if any internal state may be different
        self._dirty = False
        # True if the set of tracked files may be different
        self._dirty_tracked_set = False
        self._ui = ui
        # cache entries used by the filecache-style properties (_branch, ...)
        self._filecache = {}
        # number of currently active parentchange() contexts
        self._parentwriters = 0
        self._filename = b'dirstate'
        self._filename_th = b'dirstate-tracked-hint'
        self._pendingfilename = b'%s.pending' % self._filename
        # callbacks notified when the parent list changes
        self._plchangecallbacks = {}
        # parents as they were before the current round of changes, if any
        self._origpl = None
        self._mapcls = dirstatemap.dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
137
137
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        # merely reading the property loads and caches the parents
        self._pl
144
144
    @contextlib.contextmanager
    def parentchange(self):
        """Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        """
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1
161
161
162 def pendingparentchange(self):
162 def pendingparentchange(self):
163 """Returns true if the dirstate is in the middle of a set of changes
163 """Returns true if the dirstate is in the middle of a set of changes
164 that modify the dirstate parent.
164 that modify the dirstate parent.
165 """
165 """
166 return self._parentwriters > 0
166 return self._parentwriters > 0
167
167
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # assign eagerly so the attribute is already present while the map is
        # being constructed; propertycache stores it only after we return
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map
179
179
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.

        When sparse is disabled, return None.
        """
        if self._sparsematchfn is None:
            return None
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
196
196
197 @repocache(b'branch')
197 @repocache(b'branch')
198 def _branch(self):
198 def _branch(self):
199 try:
199 try:
200 return self._opener.read(b"branch").strip() or b"default"
200 return self._opener.read(b"branch").strip() or b"default"
201 except FileNotFoundError:
201 except FileNotFoundError:
202 return b"default"
202 return b"default"
203
203
    @property
    def _pl(self):
        # current dirstate parents, straight from the underlying map
        return self._map.parents()
207
207
    def hasdir(self, d):
        # True when d is a directory containing at least one tracked file
        return self._map.hastrackeddir(d)
210
210
211 @rootcache(b'.hgignore')
211 @rootcache(b'.hgignore')
212 def _ignore(self):
212 def _ignore(self):
213 files = self._ignorefiles()
213 files = self._ignorefiles()
214 if not files:
214 if not files:
215 return matchmod.never()
215 return matchmod.never()
216
216
217 pats = [b'include:%s' % f for f in files]
217 pats = [b'include:%s' % f for f in files]
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
219
219
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' despite a non-'/' os sep
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
223
223
    @propertycache
    def _checklink(self):
        # whether the filesystem under the repo root supports symlinks
        return util.checklink(self._root)
227
227
    @propertycache
    def _checkexec(self):
        # whether the filesystem under the repo root preserves the exec bit
        return bool(util.checkexec(self._root))
231
231
    @propertycache
    def _checkcase(self):
        # True on case-insensitive filesystems (probed on the .hg directory)
        return not util.fscasesensitive(self._join(b'.hg'))
235
235
    def _join(self, f):
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
240
240
    def flagfunc(self, buildfallback):
        """build a callable that returns flags associated with a filename

        The information is extracted from three possible layers:
        1. the file system if it supports the information
        2. the "fallback" information stored in the dirstate if any
        3. a more expensive mechanism inferring the flags from the parents.
        """

        # small hack to cache the result of buildfallback()
        fallback_func = []

        def get_flags(x):
            # Returns b'l' for a symlink, b'x' for an executable file and
            # b'' otherwise; a symlink answer takes precedence over exec.
            entry = None
            fallback_value = None
            try:
                st = os.lstat(self._join(x))
            except OSError:
                # file is missing (or unreadable): it carries no flags
                return b''

            if self._checklink:
                # the filesystem represents symlinks: trust lstat()
                if util.statislink(st):
                    return b'l'
            else:
                # no symlink support: consult the dirstate fallback bit,
                # then (lazily, cached above) the parent-based fallback
                entry = self.get_entry(x)
                if entry.has_fallback_symlink:
                    if entry.fallback_symlink:
                        return b'l'
                else:
                    if not fallback_func:
                        fallback_func.append(buildfallback())
                    fallback_value = fallback_func[0](x)
                    if b'l' in fallback_value:
                        return b'l'

            if self._checkexec:
                # the filesystem tracks the exec bit: trust lstat()
                if util.statisexec(st):
                    return b'x'
            else:
                # same layered fallback as for symlinks above, reusing any
                # entry/fallback_value already computed
                if entry is None:
                    entry = self.get_entry(x)
                if entry.has_fallback_exec:
                    if entry.fallback_exec:
                        return b'x'
                else:
                    if fallback_value is None:
                        if not fallback_func:
                            fallback_func.append(buildfallback())
                        fallback_value = fallback_func[0](x)
                    if b'x' in fallback_value:
                        return b'x'
            return b''

        return get_flags
295
295
296 @propertycache
296 @propertycache
297 def _cwd(self):
297 def _cwd(self):
298 # internal config: ui.forcecwd
298 # internal config: ui.forcecwd
299 forcecwd = self._ui.config(b'ui', b'forcecwd')
299 forcecwd = self._ui.config(b'ui', b'forcecwd')
300 if forcecwd:
300 if forcecwd:
301 return forcecwd
301 return forcecwd
302 return encoding.getcwd()
302 return encoding.getcwd()
303
303
304 def getcwd(self):
304 def getcwd(self):
305 """Return the path from which a canonical path is calculated.
305 """Return the path from which a canonical path is calculated.
306
306
307 This path should be used to resolve file patterns or to convert
307 This path should be used to resolve file patterns or to convert
308 canonical paths back to file paths for display. It shouldn't be
308 canonical paths back to file paths for display. It shouldn't be
309 used to get real file paths. Use vfs functions instead.
309 used to get real file paths. Use vfs functions instead.
310 """
310 """
311 cwd = self._cwd
311 cwd = self._cwd
312 if cwd == self._root:
312 if cwd == self._root:
313 return b''
313 return b''
314 # self._root ends with a path separator if self._root is '/' or 'C:\'
314 # self._root ends with a path separator if self._root is '/' or 'C:\'
315 rootsep = self._root
315 rootsep = self._root
316 if not util.endswithsep(rootsep):
316 if not util.endswithsep(rootsep):
317 rootsep += pycompat.ossep
317 rootsep += pycompat.ossep
318 if cwd.startswith(rootsep):
318 if cwd.startswith(rootsep):
319 return cwd[len(rootsep) :]
319 return cwd[len(rootsep) :]
320 else:
320 else:
321 # we're outside the repo. return an absolute path.
321 # we're outside the repo. return an absolute path.
322 return cwd
322 return cwd
323
323
324 def pathto(self, f, cwd=None):
324 def pathto(self, f, cwd=None):
325 if cwd is None:
325 if cwd is None:
326 cwd = self.getcwd()
326 cwd = self.getcwd()
327 path = util.pathto(self._root, cwd, f)
327 path = util.pathto(self._root, cwd, f)
328 if self._slash:
328 if self._slash:
329 return util.pconvert(path)
329 return util.pconvert(path)
330 return path
330 return path
331
331
332 def get_entry(self, path):
332 def get_entry(self, path):
333 """return a DirstateItem for the associated path"""
333 """return a DirstateItem for the associated path"""
334 entry = self._map.get(path)
334 entry = self._map.get(path)
335 if entry is None:
335 if entry is None:
336 return DirstateItem()
336 return DirstateItem()
337 return entry
337 return entry
338
338
    def __contains__(self, key):
        # membership is delegated to the underlying dirstate map
        return key in self._map
341
341
    def __iter__(self):
        # iterate over known filenames in sorted order
        return iter(sorted(self._map))
344
344
    def items(self):
        # (filename, entry) pairs straight from the underlying map
        return self._map.items()

    # legacy alias kept for callers still using the old name
    iteritems = items
349
349
350 def parents(self):
350 def parents(self):
351 return [self._validate(p) for p in self._pl]
351 return [self._validate(p) for p in self._pl]
352
352
    def p1(self):
        # validated first parent
        return self._validate(self._pl[0])
355
355
    def p2(self):
        # validated second parent (nullid outside of a merge)
        return self._validate(self._pl[1])
358
358
    @property
    def in_merge(self):
        """True if a merge is in progress"""
        # a non-null second parent is what defines an ongoing merge
        return self._pl[1] != self._nodeconstants.nullid
363
363
    def branch(self):
        # convert the stored branch name to the local encoding for display
        return encoding.tolocal(self._branch)
366
366
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.parentchange context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the parents as they were before this set of changes
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)
392
392
    def setbranch(self, branch):
        # update the cached value through the filecache descriptor first
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            # discard the partially written temp file before re-raising
            f.discard()
            raise
408
408
409 def invalidate(self):
409 def invalidate(self):
410 """Causes the next access to reread the dirstate.
410 """Causes the next access to reread the dirstate.
411
411
412 This is different from localrepo.invalidatedirstate() because it always
412 This is different from localrepo.invalidatedirstate() because it always
413 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
413 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
414 check whether the dirstate has changed before rereading it."""
414 check whether the dirstate has changed before rereading it."""
415
415
416 for a in ("_map", "_branch", "_ignore"):
416 for a in ("_map", "_branch", "_ignore"):
417 if a in self.__dict__:
417 if a in self.__dict__:
418 delattr(self, a)
418 delattr(self, a)
419 self._dirty = False
419 self._dirty = False
420 self._dirty_tracked_set = False
420 self._dirty_tracked_set = False
421 self._parentwriters = 0
421 self._parentwriters = 0
422 self._origpl = None
422 self._origpl = None
423
423
424 def copy(self, source, dest):
424 def copy(self, source, dest):
425 """Mark dest as a copy of source. Unmark dest if source is None."""
425 """Mark dest as a copy of source. Unmark dest if source is None."""
426 if source == dest:
426 if source == dest:
427 return
427 return
428 self._dirty = True
428 self._dirty = True
429 if source is not None:
429 if source is not None:
430 self._map.copymap[dest] = source
430 self._map.copymap[dest] = source
431 else:
431 else:
432 self._map.copymap.pop(dest, None)
432 self._map.copymap.pop(dest, None)
433
433
    def copied(self, file):
        # copy source for file, or None when file is not a copy
        return self._map.copymap.get(file, None)
436
436
    def copies(self):
        # the full dest -> source copy mapping
        return self._map.copymap
439
439
    @requires_no_parents_change
    def set_tracked(self, filename, reset_copy=False):
        """a "public" method for generic code to mark a file as tracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg add X`.

        if reset_copy is set, any existing copy information will be dropped.

        return True if the file was previously untracked, False otherwise.
        """
        self._dirty = True
        entry = self._map.get(filename)
        if entry is None or not entry.tracked:
            # newly tracked name: reject clashes with tracked dirs/files
            self._check_new_tracked_filename(filename)
        pre_tracked = self._map.set_tracked(filename)
        if reset_copy:
            self._map.copymap.pop(filename, None)
        if pre_tracked:
            self._dirty_tracked_set = True
        return pre_tracked
461
461
462 @requires_no_parents_change
462 @requires_no_parents_change
463 def set_untracked(self, filename):
463 def set_untracked(self, filename):
464 """a "public" method for generic code to mark a file as untracked
464 """a "public" method for generic code to mark a file as untracked
465
465
466 This function is to be called outside of "update/merge" case. For
466 This function is to be called outside of "update/merge" case. For
467 example by a command like `hg remove X`.
467 example by a command like `hg remove X`.
468
468
469 return True the file was previously tracked, False otherwise.
469 return True the file was previously tracked, False otherwise.
470 """
470 """
471 ret = self._map.set_untracked(filename)
471 ret = self._map.set_untracked(filename)
472 if ret:
472 if ret:
473 self._dirty = True
473 self._dirty = True
474 self._dirty_tracked_set = True
474 self._dirty_tracked_set = True
475 return ret
475 return ret
476
476
    @requires_no_parents_change
    def set_clean(self, filename, parentfiledata):
        """record that the current state of the file on disk is known to be clean"""
        self._dirty = True
        if not self._map[filename].tracked:
            self._check_new_tracked_filename(filename)
        # parentfiledata is the (mode, size, mtime) triple observed on disk
        (mode, size, mtime) = parentfiledata
        self._map.set_clean(filename, mode, size, mtime)
485
485
    @requires_no_parents_change
    def set_possibly_dirty(self, filename):
        """record that the current state of the file on disk is unknown"""
        self._dirty = True
        self._map.set_possibly_dirty(filename)
491
491
    @requires_parents_change
    def update_file_p1(
        self,
        filename,
        p1_tracked,
    ):
        """Set a file as tracked in the parent (or not)

        This is to be called when adjusting the dirstate to a new parent after
        an history rewriting operation.

        It should not be called during a merge (p2 != nullid) and only within
        a `with dirstate.parentchange():` context.
        """
        if self.in_merge:
            msg = b'update_file_reference should not be called when merging'
            raise error.ProgrammingError(msg)
        entry = self._map.get(filename)
        if entry is None:
            wc_tracked = False
        else:
            wc_tracked = entry.tracked
        if not (p1_tracked or wc_tracked):
            # the file is no longer relevant to anyone
            if self._map.get(filename) is not None:
                self._map.reset_state(filename)
                self._dirty = True
        elif (not p1_tracked) and wc_tracked:
            if entry is not None and entry.added:
                return  # avoid dropping copy information (maybe?)

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            # the underlying reference might have changed, we will have to
            # check it.
            has_meaningful_mtime=False,
        )
531
531
    @requires_parents_change
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the dirstate's parent changes to keep track
        of what is the file situation in regards to the working copy and its
        parent.

        This function must be called within a `dirstate.parentchange` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.
        """

        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees

        self._dirty = True
        old_entry = self._map.get(filename)
        if old_entry is None:
            prev_tracked = False
        else:
            prev_tracked = old_entry.tracked
        if prev_tracked != wc_tracked:
            # the tracked set changes, remember it for the tracked-hint file
            self._dirty_tracked_set = True

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            p2_info=p2_info,
            has_meaningful_mtime=not possibly_dirty,
            parentfiledata=parentfiledata,
        )
575
575
576 def _check_new_tracked_filename(self, filename):
576 def _check_new_tracked_filename(self, filename):
577 scmutil.checkfilename(filename)
577 scmutil.checkfilename(filename)
578 if self._map.hastrackeddir(filename):
578 if self._map.hastrackeddir(filename):
579 msg = _(b'directory %r already in dirstate')
579 msg = _(b'directory %r already in dirstate')
580 msg %= pycompat.bytestr(filename)
580 msg %= pycompat.bytestr(filename)
581 raise error.Abort(msg)
581 raise error.Abort(msg)
582 # shadows
582 # shadows
583 for d in pathutil.finddirs(filename):
583 for d in pathutil.finddirs(filename):
584 if self._map.hastrackeddir(d):
584 if self._map.hastrackeddir(d):
585 break
585 break
586 entry = self._map.get(d)
586 entry = self._map.get(d)
587 if entry is not None and not entry.removed:
587 if entry is not None and not entry.removed:
588 msg = _(b'file %r in dirstate clashes with %r')
588 msg = _(b'file %r in dirstate clashes with %r')
589 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
589 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
590 raise error.Abort(msg)
590 raise error.Abort(msg)
591
591
592 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
592 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
593 if exists is None:
593 if exists is None:
594 exists = os.path.lexists(os.path.join(self._root, path))
594 exists = os.path.lexists(os.path.join(self._root, path))
595 if not exists:
595 if not exists:
596 # Maybe a path component exists
596 # Maybe a path component exists
597 if not ignoremissing and b'/' in path:
597 if not ignoremissing and b'/' in path:
598 d, f = path.rsplit(b'/', 1)
598 d, f = path.rsplit(b'/', 1)
599 d = self._normalize(d, False, ignoremissing, None)
599 d = self._normalize(d, False, ignoremissing, None)
600 folded = d + b"/" + f
600 folded = d + b"/" + f
601 else:
601 else:
602 # No path components, preserve original case
602 # No path components, preserve original case
603 folded = path
603 folded = path
604 else:
604 else:
605 # recursively normalize leading directory components
605 # recursively normalize leading directory components
606 # against dirstate
606 # against dirstate
607 if b'/' in normed:
607 if b'/' in normed:
608 d, f = normed.rsplit(b'/', 1)
608 d, f = normed.rsplit(b'/', 1)
609 d = self._normalize(d, False, ignoremissing, True)
609 d = self._normalize(d, False, ignoremissing, True)
610 r = self._root + b"/" + d
610 r = self._root + b"/" + d
611 folded = d + b"/" + util.fspath(f, r)
611 folded = d + b"/" + util.fspath(f, r)
612 else:
612 else:
613 folded = util.fspath(normed, self._root)
613 folded = util.fspath(normed, self._root)
614 storemap[normed] = folded
614 storemap[normed] = folded
615
615
616 return folded
616 return folded
617
617
618 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
618 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
619 normed = util.normcase(path)
619 normed = util.normcase(path)
620 folded = self._map.filefoldmap.get(normed, None)
620 folded = self._map.filefoldmap.get(normed, None)
621 if folded is None:
621 if folded is None:
622 if isknown:
622 if isknown:
623 folded = path
623 folded = path
624 else:
624 else:
625 folded = self._discoverpath(
625 folded = self._discoverpath(
626 path, normed, ignoremissing, exists, self._map.filefoldmap
626 path, normed, ignoremissing, exists, self._map.filefoldmap
627 )
627 )
628 return folded
628 return folded
629
629
630 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
630 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
631 normed = util.normcase(path)
631 normed = util.normcase(path)
632 folded = self._map.filefoldmap.get(normed, None)
632 folded = self._map.filefoldmap.get(normed, None)
633 if folded is None:
633 if folded is None:
634 folded = self._map.dirfoldmap.get(normed, None)
634 folded = self._map.dirfoldmap.get(normed, None)
635 if folded is None:
635 if folded is None:
636 if isknown:
636 if isknown:
637 folded = path
637 folded = path
638 else:
638 else:
639 # store discovered result in dirfoldmap so that future
639 # store discovered result in dirfoldmap so that future
640 # normalizefile calls don't start matching directories
640 # normalizefile calls don't start matching directories
641 folded = self._discoverpath(
641 folded = self._discoverpath(
642 path, normed, ignoremissing, exists, self._map.dirfoldmap
642 path, normed, ignoremissing, exists, self._map.dirfoldmap
643 )
643 )
644 return folded
644 return folded
645
645
646 def normalize(self, path, isknown=False, ignoremissing=False):
646 def normalize(self, path, isknown=False, ignoremissing=False):
647 """
647 """
648 normalize the case of a pathname when on a casefolding filesystem
648 normalize the case of a pathname when on a casefolding filesystem
649
649
650 isknown specifies whether the filename came from walking the
650 isknown specifies whether the filename came from walking the
651 disk, to avoid extra filesystem access.
651 disk, to avoid extra filesystem access.
652
652
653 If ignoremissing is True, missing path are returned
653 If ignoremissing is True, missing path are returned
654 unchanged. Otherwise, we try harder to normalize possibly
654 unchanged. Otherwise, we try harder to normalize possibly
655 existing path components.
655 existing path components.
656
656
657 The normalized case is determined based on the following precedence:
657 The normalized case is determined based on the following precedence:
658
658
659 - version of name already stored in the dirstate
659 - version of name already stored in the dirstate
660 - version of name stored on disk
660 - version of name stored on disk
661 - version provided via command arguments
661 - version provided via command arguments
662 """
662 """
663
663
664 if self._checkcase:
664 if self._checkcase:
665 return self._normalize(path, isknown, ignoremissing)
665 return self._normalize(path, isknown, ignoremissing)
666 return path
666 return path
667
667
668 def clear(self):
668 def clear(self):
669 self._map.clear()
669 self._map.clear()
670 self._dirty = True
670 self._dirty = True
671
671
672 def rebuild(self, parent, allfiles, changedfiles=None):
672 def rebuild(self, parent, allfiles, changedfiles=None):
673 if changedfiles is None:
673 if changedfiles is None:
674 # Rebuild entire dirstate
674 # Rebuild entire dirstate
675 to_lookup = allfiles
675 to_lookup = allfiles
676 to_drop = []
676 to_drop = []
677 self.clear()
677 self.clear()
678 elif len(changedfiles) < 10:
678 elif len(changedfiles) < 10:
679 # Avoid turning allfiles into a set, which can be expensive if it's
679 # Avoid turning allfiles into a set, which can be expensive if it's
680 # large.
680 # large.
681 to_lookup = []
681 to_lookup = []
682 to_drop = []
682 to_drop = []
683 for f in changedfiles:
683 for f in changedfiles:
684 if f in allfiles:
684 if f in allfiles:
685 to_lookup.append(f)
685 to_lookup.append(f)
686 else:
686 else:
687 to_drop.append(f)
687 to_drop.append(f)
688 else:
688 else:
689 changedfilesset = set(changedfiles)
689 changedfilesset = set(changedfiles)
690 to_lookup = changedfilesset & set(allfiles)
690 to_lookup = changedfilesset & set(allfiles)
691 to_drop = changedfilesset - to_lookup
691 to_drop = changedfilesset - to_lookup
692
692
693 if self._origpl is None:
693 if self._origpl is None:
694 self._origpl = self._pl
694 self._origpl = self._pl
695 self._map.setparents(parent, self._nodeconstants.nullid)
695 self._map.setparents(parent, self._nodeconstants.nullid)
696
696
697 for f in to_lookup:
697 for f in to_lookup:
698
698
699 if self.in_merge:
699 if self.in_merge:
700 self.set_tracked(f)
700 self.set_tracked(f)
701 else:
701 else:
702 self._map.reset_state(
702 self._map.reset_state(
703 f,
703 f,
704 wc_tracked=True,
704 wc_tracked=True,
705 p1_tracked=True,
705 p1_tracked=True,
706 )
706 )
707 for f in to_drop:
707 for f in to_drop:
708 self._map.reset_state(f)
708 self._map.reset_state(f)
709
709
710 self._dirty = True
710 self._dirty = True
711
711
712 def identity(self):
712 def identity(self):
713 """Return identity of dirstate itself to detect changing in storage
713 """Return identity of dirstate itself to detect changing in storage
714
714
715 If identity of previous dirstate is equal to this, writing
715 If identity of previous dirstate is equal to this, writing
716 changes based on the former dirstate out can keep consistency.
716 changes based on the former dirstate out can keep consistency.
717 """
717 """
718 return self._map.identity
718 return self._map.identity
719
719
720 def write(self, tr):
720 def write(self, tr):
721 if not self._dirty:
721 if not self._dirty:
722 return
722 return
723
723
724 write_key = self._use_tracked_hint and self._dirty_tracked_set
724 write_key = self._use_tracked_hint and self._dirty_tracked_set
725 if tr:
725 if tr:
726 # delay writing in-memory changes out
726 # delay writing in-memory changes out
727 tr.addfilegenerator(
727 tr.addfilegenerator(
728 b'dirstate-1-main',
728 b'dirstate-1-main',
729 (self._filename,),
729 (self._filename,),
730 lambda f: self._writedirstate(tr, f),
730 lambda f: self._writedirstate(tr, f),
731 location=b'plain',
731 location=b'plain',
732 post_finalize=True,
732 post_finalize=True,
733 )
733 )
734 if write_key:
734 if write_key:
735 tr.addfilegenerator(
735 tr.addfilegenerator(
736 b'dirstate-2-key-post',
736 b'dirstate-2-key-post',
737 (self._filename_th,),
737 (self._filename_th,),
738 lambda f: self._write_tracked_hint(tr, f),
738 lambda f: self._write_tracked_hint(tr, f),
739 location=b'plain',
739 location=b'plain',
740 post_finalize=True,
740 post_finalize=True,
741 )
741 )
742 return
742 return
743
743
744 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
744 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
745 with file(self._filename) as f:
745 with file(self._filename) as f:
746 self._writedirstate(tr, f)
746 self._writedirstate(tr, f)
747 if write_key:
747 if write_key:
748 # we update the key-file after writing to make sure reader have a
748 # we update the key-file after writing to make sure reader have a
749 # key that match the newly written content
749 # key that match the newly written content
750 with file(self._filename_th) as f:
750 with file(self._filename_th) as f:
751 self._write_tracked_hint(tr, f)
751 self._write_tracked_hint(tr, f)
752
752
753 def delete_tracked_hint(self):
753 def delete_tracked_hint(self):
754 """remove the tracked_hint file
754 """remove the tracked_hint file
755
755
756 To be used by format downgrades operation"""
756 To be used by format downgrades operation"""
757 self._opener.unlink(self._filename_th)
757 self._opener.unlink(self._filename_th)
758 self._use_tracked_hint = False
758 self._use_tracked_hint = False
759
759
760 def addparentchangecallback(self, category, callback):
760 def addparentchangecallback(self, category, callback):
761 """add a callback to be called when the wd parents are changed
761 """add a callback to be called when the wd parents are changed
762
762
763 Callback will be called with the following arguments:
763 Callback will be called with the following arguments:
764 dirstate, (oldp1, oldp2), (newp1, newp2)
764 dirstate, (oldp1, oldp2), (newp1, newp2)
765
765
766 Category is a unique identifier to allow overwriting an old callback
766 Category is a unique identifier to allow overwriting an old callback
767 with a newer callback.
767 with a newer callback.
768 """
768 """
769 self._plchangecallbacks[category] = callback
769 self._plchangecallbacks[category] = callback
770
770
771 def _writedirstate(self, tr, st):
771 def _writedirstate(self, tr, st):
772 # notify callbacks about parents change
772 # notify callbacks about parents change
773 if self._origpl is not None and self._origpl != self._pl:
773 if self._origpl is not None and self._origpl != self._pl:
774 for c, callback in sorted(self._plchangecallbacks.items()):
774 for c, callback in sorted(self._plchangecallbacks.items()):
775 callback(self, self._origpl, self._pl)
775 callback(self, self._origpl, self._pl)
776 self._origpl = None
776 self._origpl = None
777 self._map.write(tr, st)
777 self._map.write(tr, st)
778 self._dirty = False
778 self._dirty = False
779 self._dirty_tracked_set = False
779 self._dirty_tracked_set = False
780
780
781 def _write_tracked_hint(self, tr, f):
781 def _write_tracked_hint(self, tr, f):
782 key = node.hex(uuid.uuid4().bytes)
782 key = node.hex(uuid.uuid4().bytes)
783 f.write(b"1\n%s\n" % key) # 1 is the format version
783 f.write(b"1\n%s\n" % key) # 1 is the format version
784
784
785 def _dirignore(self, f):
785 def _dirignore(self, f):
786 if self._ignore(f):
786 if self._ignore(f):
787 return True
787 return True
788 for p in pathutil.finddirs(f):
788 for p in pathutil.finddirs(f):
789 if self._ignore(p):
789 if self._ignore(p):
790 return True
790 return True
791 return False
791 return False
792
792
793 def _ignorefiles(self):
793 def _ignorefiles(self):
794 files = []
794 files = []
795 if os.path.exists(self._join(b'.hgignore')):
795 if os.path.exists(self._join(b'.hgignore')):
796 files.append(self._join(b'.hgignore'))
796 files.append(self._join(b'.hgignore'))
797 for name, path in self._ui.configitems(b"ui"):
797 for name, path in self._ui.configitems(b"ui"):
798 if name == b'ignore' or name.startswith(b'ignore.'):
798 if name == b'ignore' or name.startswith(b'ignore.'):
799 # we need to use os.path.join here rather than self._join
799 # we need to use os.path.join here rather than self._join
800 # because path is arbitrary and user-specified
800 # because path is arbitrary and user-specified
801 files.append(os.path.join(self._rootdir, util.expandpath(path)))
801 files.append(os.path.join(self._rootdir, util.expandpath(path)))
802 return files
802 return files
803
803
804 def _ignorefileandline(self, f):
804 def _ignorefileandline(self, f):
805 files = collections.deque(self._ignorefiles())
805 files = collections.deque(self._ignorefiles())
806 visited = set()
806 visited = set()
807 while files:
807 while files:
808 i = files.popleft()
808 i = files.popleft()
809 patterns = matchmod.readpatternfile(
809 patterns = matchmod.readpatternfile(
810 i, self._ui.warn, sourceinfo=True
810 i, self._ui.warn, sourceinfo=True
811 )
811 )
812 for pattern, lineno, line in patterns:
812 for pattern, lineno, line in patterns:
813 kind, p = matchmod._patsplit(pattern, b'glob')
813 kind, p = matchmod._patsplit(pattern, b'glob')
814 if kind == b"subinclude":
814 if kind == b"subinclude":
815 if p not in visited:
815 if p not in visited:
816 files.append(p)
816 files.append(p)
817 continue
817 continue
818 m = matchmod.match(
818 m = matchmod.match(
819 self._root, b'', [], [pattern], warn=self._ui.warn
819 self._root, b'', [], [pattern], warn=self._ui.warn
820 )
820 )
821 if m(f):
821 if m(f):
822 return (i, lineno, line)
822 return (i, lineno, line)
823 visited.add(i)
823 visited.add(i)
824 return (None, -1, b"")
824 return (None, -1, b"")
825
825
826 def _walkexplicit(self, match, subrepos):
826 def _walkexplicit(self, match, subrepos):
827 """Get stat data about the files explicitly specified by match.
827 """Get stat data about the files explicitly specified by match.
828
828
829 Return a triple (results, dirsfound, dirsnotfound).
829 Return a triple (results, dirsfound, dirsnotfound).
830 - results is a mapping from filename to stat result. It also contains
830 - results is a mapping from filename to stat result. It also contains
831 listings mapping subrepos and .hg to None.
831 listings mapping subrepos and .hg to None.
832 - dirsfound is a list of files found to be directories.
832 - dirsfound is a list of files found to be directories.
833 - dirsnotfound is a list of files that the dirstate thinks are
833 - dirsnotfound is a list of files that the dirstate thinks are
834 directories and that were not found."""
834 directories and that were not found."""
835
835
836 def badtype(mode):
836 def badtype(mode):
837 kind = _(b'unknown')
837 kind = _(b'unknown')
838 if stat.S_ISCHR(mode):
838 if stat.S_ISCHR(mode):
839 kind = _(b'character device')
839 kind = _(b'character device')
840 elif stat.S_ISBLK(mode):
840 elif stat.S_ISBLK(mode):
841 kind = _(b'block device')
841 kind = _(b'block device')
842 elif stat.S_ISFIFO(mode):
842 elif stat.S_ISFIFO(mode):
843 kind = _(b'fifo')
843 kind = _(b'fifo')
844 elif stat.S_ISSOCK(mode):
844 elif stat.S_ISSOCK(mode):
845 kind = _(b'socket')
845 kind = _(b'socket')
846 elif stat.S_ISDIR(mode):
846 elif stat.S_ISDIR(mode):
847 kind = _(b'directory')
847 kind = _(b'directory')
848 return _(b'unsupported file type (type is %s)') % kind
848 return _(b'unsupported file type (type is %s)') % kind
849
849
850 badfn = match.bad
850 badfn = match.bad
851 dmap = self._map
851 dmap = self._map
852 lstat = os.lstat
852 lstat = os.lstat
853 getkind = stat.S_IFMT
853 getkind = stat.S_IFMT
854 dirkind = stat.S_IFDIR
854 dirkind = stat.S_IFDIR
855 regkind = stat.S_IFREG
855 regkind = stat.S_IFREG
856 lnkkind = stat.S_IFLNK
856 lnkkind = stat.S_IFLNK
857 join = self._join
857 join = self._join
858 dirsfound = []
858 dirsfound = []
859 foundadd = dirsfound.append
859 foundadd = dirsfound.append
860 dirsnotfound = []
860 dirsnotfound = []
861 notfoundadd = dirsnotfound.append
861 notfoundadd = dirsnotfound.append
862
862
863 if not match.isexact() and self._checkcase:
863 if not match.isexact() and self._checkcase:
864 normalize = self._normalize
864 normalize = self._normalize
865 else:
865 else:
866 normalize = None
866 normalize = None
867
867
868 files = sorted(match.files())
868 files = sorted(match.files())
869 subrepos.sort()
869 subrepos.sort()
870 i, j = 0, 0
870 i, j = 0, 0
871 while i < len(files) and j < len(subrepos):
871 while i < len(files) and j < len(subrepos):
872 subpath = subrepos[j] + b"/"
872 subpath = subrepos[j] + b"/"
873 if files[i] < subpath:
873 if files[i] < subpath:
874 i += 1
874 i += 1
875 continue
875 continue
876 while i < len(files) and files[i].startswith(subpath):
876 while i < len(files) and files[i].startswith(subpath):
877 del files[i]
877 del files[i]
878 j += 1
878 j += 1
879
879
880 if not files or b'' in files:
880 if not files or b'' in files:
881 files = [b'']
881 files = [b'']
882 # constructing the foldmap is expensive, so don't do it for the
882 # constructing the foldmap is expensive, so don't do it for the
883 # common case where files is ['']
883 # common case where files is ['']
884 normalize = None
884 normalize = None
885 results = dict.fromkeys(subrepos)
885 results = dict.fromkeys(subrepos)
886 results[b'.hg'] = None
886 results[b'.hg'] = None
887
887
888 for ff in files:
888 for ff in files:
889 if normalize:
889 if normalize:
890 nf = normalize(ff, False, True)
890 nf = normalize(ff, False, True)
891 else:
891 else:
892 nf = ff
892 nf = ff
893 if nf in results:
893 if nf in results:
894 continue
894 continue
895
895
896 try:
896 try:
897 st = lstat(join(nf))
897 st = lstat(join(nf))
898 kind = getkind(st.st_mode)
898 kind = getkind(st.st_mode)
899 if kind == dirkind:
899 if kind == dirkind:
900 if nf in dmap:
900 if nf in dmap:
901 # file replaced by dir on disk but still in dirstate
901 # file replaced by dir on disk but still in dirstate
902 results[nf] = None
902 results[nf] = None
903 foundadd((nf, ff))
903 foundadd((nf, ff))
904 elif kind == regkind or kind == lnkkind:
904 elif kind == regkind or kind == lnkkind:
905 results[nf] = st
905 results[nf] = st
906 else:
906 else:
907 badfn(ff, badtype(kind))
907 badfn(ff, badtype(kind))
908 if nf in dmap:
908 if nf in dmap:
909 results[nf] = None
909 results[nf] = None
910 except OSError as inst: # nf not found on disk - it is dirstate only
910 except OSError as inst: # nf not found on disk - it is dirstate only
911 if nf in dmap: # does it exactly match a missing file?
911 if nf in dmap: # does it exactly match a missing file?
912 results[nf] = None
912 results[nf] = None
913 else: # does it match a missing directory?
913 else: # does it match a missing directory?
914 if self._map.hasdir(nf):
914 if self._map.hasdir(nf):
915 notfoundadd(nf)
915 notfoundadd(nf)
916 else:
916 else:
917 badfn(ff, encoding.strtolocal(inst.strerror))
917 badfn(ff, encoding.strtolocal(inst.strerror))
918
918
919 # match.files() may contain explicitly-specified paths that shouldn't
919 # match.files() may contain explicitly-specified paths that shouldn't
920 # be taken; drop them from the list of files found. dirsfound/notfound
920 # be taken; drop them from the list of files found. dirsfound/notfound
921 # aren't filtered here because they will be tested later.
921 # aren't filtered here because they will be tested later.
922 if match.anypats():
922 if match.anypats():
923 for f in list(results):
923 for f in list(results):
924 if f == b'.hg' or f in subrepos:
924 if f == b'.hg' or f in subrepos:
925 # keep sentinel to disable further out-of-repo walks
925 # keep sentinel to disable further out-of-repo walks
926 continue
926 continue
927 if not match(f):
927 if not match(f):
928 del results[f]
928 del results[f]
929
929
930 # Case insensitive filesystems cannot rely on lstat() failing to detect
930 # Case insensitive filesystems cannot rely on lstat() failing to detect
931 # a case-only rename. Prune the stat object for any file that does not
931 # a case-only rename. Prune the stat object for any file that does not
932 # match the case in the filesystem, if there are multiple files that
932 # match the case in the filesystem, if there are multiple files that
933 # normalize to the same path.
933 # normalize to the same path.
934 if match.isexact() and self._checkcase:
934 if match.isexact() and self._checkcase:
935 normed = {}
935 normed = {}
936
936
937 for f, st in results.items():
937 for f, st in results.items():
938 if st is None:
938 if st is None:
939 continue
939 continue
940
940
941 nc = util.normcase(f)
941 nc = util.normcase(f)
942 paths = normed.get(nc)
942 paths = normed.get(nc)
943
943
944 if paths is None:
944 if paths is None:
945 paths = set()
945 paths = set()
946 normed[nc] = paths
946 normed[nc] = paths
947
947
948 paths.add(f)
948 paths.add(f)
949
949
950 for norm, paths in normed.items():
950 for norm, paths in normed.items():
951 if len(paths) > 1:
951 if len(paths) > 1:
952 for path in paths:
952 for path in paths:
953 folded = self._discoverpath(
953 folded = self._discoverpath(
954 path, norm, True, None, self._map.dirfoldmap
954 path, norm, True, None, self._map.dirfoldmap
955 )
955 )
956 if path != folded:
956 if path != folded:
957 results[path] = None
957 results[path] = None
958
958
959 return results, dirsfound, dirsnotfound
959 return results, dirsfound, dirsnotfound
960
960
961 def walk(self, match, subrepos, unknown, ignored, full=True):
961 def walk(self, match, subrepos, unknown, ignored, full=True):
962 """
962 """
963 Walk recursively through the directory tree, finding all files
963 Walk recursively through the directory tree, finding all files
964 matched by match.
964 matched by match.
965
965
966 If full is False, maybe skip some known-clean files.
966 If full is False, maybe skip some known-clean files.
967
967
968 Return a dict mapping filename to stat-like object (either
968 Return a dict mapping filename to stat-like object (either
969 mercurial.osutil.stat instance or return value of os.stat()).
969 mercurial.osutil.stat instance or return value of os.stat()).
970
970
971 """
971 """
972 # full is a flag that extensions that hook into walk can use -- this
972 # full is a flag that extensions that hook into walk can use -- this
973 # implementation doesn't use it at all. This satisfies the contract
973 # implementation doesn't use it at all. This satisfies the contract
974 # because we only guarantee a "maybe".
974 # because we only guarantee a "maybe".
975
975
976 if ignored:
976 if ignored:
977 ignore = util.never
977 ignore = util.never
978 dirignore = util.never
978 dirignore = util.never
979 elif unknown:
979 elif unknown:
980 ignore = self._ignore
980 ignore = self._ignore
981 dirignore = self._dirignore
981 dirignore = self._dirignore
982 else:
982 else:
983 # if not unknown and not ignored, drop dir recursion and step 2
983 # if not unknown and not ignored, drop dir recursion and step 2
984 ignore = util.always
984 ignore = util.always
985 dirignore = util.always
985 dirignore = util.always
986
986
987 if self._sparsematchfn is not None:
988 em = matchmod.exact(match.files())
989 sm = matchmod.unionmatcher([self._sparsematcher, em])
990 match = matchmod.intersectmatchers(match, sm)
991
987 matchfn = match.matchfn
992 matchfn = match.matchfn
988 matchalways = match.always()
993 matchalways = match.always()
989 matchtdir = match.traversedir
994 matchtdir = match.traversedir
990 dmap = self._map
995 dmap = self._map
991 listdir = util.listdir
996 listdir = util.listdir
992 lstat = os.lstat
997 lstat = os.lstat
993 dirkind = stat.S_IFDIR
998 dirkind = stat.S_IFDIR
994 regkind = stat.S_IFREG
999 regkind = stat.S_IFREG
995 lnkkind = stat.S_IFLNK
1000 lnkkind = stat.S_IFLNK
996 join = self._join
1001 join = self._join
997
1002
998 exact = skipstep3 = False
1003 exact = skipstep3 = False
999 if match.isexact(): # match.exact
1004 if match.isexact(): # match.exact
1000 exact = True
1005 exact = True
1001 dirignore = util.always # skip step 2
1006 dirignore = util.always # skip step 2
1002 elif match.prefix(): # match.match, no patterns
1007 elif match.prefix(): # match.match, no patterns
1003 skipstep3 = True
1008 skipstep3 = True
1004
1009
1005 if not exact and self._checkcase:
1010 if not exact and self._checkcase:
1006 normalize = self._normalize
1011 normalize = self._normalize
1007 normalizefile = self._normalizefile
1012 normalizefile = self._normalizefile
1008 skipstep3 = False
1013 skipstep3 = False
1009 else:
1014 else:
1010 normalize = self._normalize
1015 normalize = self._normalize
1011 normalizefile = None
1016 normalizefile = None
1012
1017
1013 # step 1: find all explicit files
1018 # step 1: find all explicit files
1014 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1019 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1015 if matchtdir:
1020 if matchtdir:
1016 for d in work:
1021 for d in work:
1017 matchtdir(d[0])
1022 matchtdir(d[0])
1018 for d in dirsnotfound:
1023 for d in dirsnotfound:
1019 matchtdir(d)
1024 matchtdir(d)
1020
1025
1021 skipstep3 = skipstep3 and not (work or dirsnotfound)
1026 skipstep3 = skipstep3 and not (work or dirsnotfound)
1022 work = [d for d in work if not dirignore(d[0])]
1027 work = [d for d in work if not dirignore(d[0])]
1023
1028
1024 # step 2: visit subdirectories
1029 # step 2: visit subdirectories
1025 def traverse(work, alreadynormed):
1030 def traverse(work, alreadynormed):
1026 wadd = work.append
1031 wadd = work.append
1027 while work:
1032 while work:
1028 tracing.counter('dirstate.walk work', len(work))
1033 tracing.counter('dirstate.walk work', len(work))
1029 nd = work.pop()
1034 nd = work.pop()
1030 visitentries = match.visitchildrenset(nd)
1035 visitentries = match.visitchildrenset(nd)
1031 if not visitentries:
1036 if not visitentries:
1032 continue
1037 continue
1033 if visitentries == b'this' or visitentries == b'all':
1038 if visitentries == b'this' or visitentries == b'all':
1034 visitentries = None
1039 visitentries = None
1035 skip = None
1040 skip = None
1036 if nd != b'':
1041 if nd != b'':
1037 skip = b'.hg'
1042 skip = b'.hg'
1038 try:
1043 try:
1039 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1044 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1040 entries = listdir(join(nd), stat=True, skip=skip)
1045 entries = listdir(join(nd), stat=True, skip=skip)
1041 except (PermissionError, FileNotFoundError) as inst:
1046 except (PermissionError, FileNotFoundError) as inst:
1042 match.bad(
1047 match.bad(
1043 self.pathto(nd), encoding.strtolocal(inst.strerror)
1048 self.pathto(nd), encoding.strtolocal(inst.strerror)
1044 )
1049 )
1045 continue
1050 continue
1046 for f, kind, st in entries:
1051 for f, kind, st in entries:
1047 # Some matchers may return files in the visitentries set,
1052 # Some matchers may return files in the visitentries set,
1048 # instead of 'this', if the matcher explicitly mentions them
1053 # instead of 'this', if the matcher explicitly mentions them
1049 # and is not an exactmatcher. This is acceptable; we do not
1054 # and is not an exactmatcher. This is acceptable; we do not
1050 # make any hard assumptions about file-or-directory below
1055 # make any hard assumptions about file-or-directory below
1051 # based on the presence of `f` in visitentries. If
1056 # based on the presence of `f` in visitentries. If
1052 # visitchildrenset returned a set, we can always skip the
1057 # visitchildrenset returned a set, we can always skip the
1053 # entries *not* in the set it provided regardless of whether
1058 # entries *not* in the set it provided regardless of whether
1054 # they're actually a file or a directory.
1059 # they're actually a file or a directory.
1055 if visitentries and f not in visitentries:
1060 if visitentries and f not in visitentries:
1056 continue
1061 continue
1057 if normalizefile:
1062 if normalizefile:
1058 # even though f might be a directory, we're only
1063 # even though f might be a directory, we're only
1059 # interested in comparing it to files currently in the
1064 # interested in comparing it to files currently in the
1060 # dmap -- therefore normalizefile is enough
1065 # dmap -- therefore normalizefile is enough
1061 nf = normalizefile(
1066 nf = normalizefile(
1062 nd and (nd + b"/" + f) or f, True, True
1067 nd and (nd + b"/" + f) or f, True, True
1063 )
1068 )
1064 else:
1069 else:
1065 nf = nd and (nd + b"/" + f) or f
1070 nf = nd and (nd + b"/" + f) or f
1066 if nf not in results:
1071 if nf not in results:
1067 if kind == dirkind:
1072 if kind == dirkind:
1068 if not ignore(nf):
1073 if not ignore(nf):
1069 if matchtdir:
1074 if matchtdir:
1070 matchtdir(nf)
1075 matchtdir(nf)
1071 wadd(nf)
1076 wadd(nf)
1072 if nf in dmap and (matchalways or matchfn(nf)):
1077 if nf in dmap and (matchalways or matchfn(nf)):
1073 results[nf] = None
1078 results[nf] = None
1074 elif kind == regkind or kind == lnkkind:
1079 elif kind == regkind or kind == lnkkind:
1075 if nf in dmap:
1080 if nf in dmap:
1076 if matchalways or matchfn(nf):
1081 if matchalways or matchfn(nf):
1077 results[nf] = st
1082 results[nf] = st
1078 elif (matchalways or matchfn(nf)) and not ignore(
1083 elif (matchalways or matchfn(nf)) and not ignore(
1079 nf
1084 nf
1080 ):
1085 ):
1081 # unknown file -- normalize if necessary
1086 # unknown file -- normalize if necessary
1082 if not alreadynormed:
1087 if not alreadynormed:
1083 nf = normalize(nf, False, True)
1088 nf = normalize(nf, False, True)
1084 results[nf] = st
1089 results[nf] = st
1085 elif nf in dmap and (matchalways or matchfn(nf)):
1090 elif nf in dmap and (matchalways or matchfn(nf)):
1086 results[nf] = None
1091 results[nf] = None
1087
1092
1088 for nd, d in work:
1093 for nd, d in work:
1089 # alreadynormed means that processwork doesn't have to do any
1094 # alreadynormed means that processwork doesn't have to do any
1090 # expensive directory normalization
1095 # expensive directory normalization
1091 alreadynormed = not normalize or nd == d
1096 alreadynormed = not normalize or nd == d
1092 traverse([d], alreadynormed)
1097 traverse([d], alreadynormed)
1093
1098
1094 for s in subrepos:
1099 for s in subrepos:
1095 del results[s]
1100 del results[s]
1096 del results[b'.hg']
1101 del results[b'.hg']
1097
1102
1098 # step 3: visit remaining files from dmap
1103 # step 3: visit remaining files from dmap
1099 if not skipstep3 and not exact:
1104 if not skipstep3 and not exact:
1100 # If a dmap file is not in results yet, it was either
1105 # If a dmap file is not in results yet, it was either
1101 # a) not matching matchfn b) ignored, c) missing, or d) under a
1106 # a) not matching matchfn b) ignored, c) missing, or d) under a
1102 # symlink directory.
1107 # symlink directory.
1103 if not results and matchalways:
1108 if not results and matchalways:
1104 visit = [f for f in dmap]
1109 visit = [f for f in dmap]
1105 else:
1110 else:
1106 visit = [f for f in dmap if f not in results and matchfn(f)]
1111 visit = [f for f in dmap if f not in results and matchfn(f)]
1107 visit.sort()
1112 visit.sort()
1108
1113
1109 if unknown:
1114 if unknown:
1110 # unknown == True means we walked all dirs under the roots
1115 # unknown == True means we walked all dirs under the roots
1111 # that wasn't ignored, and everything that matched was stat'ed
1116 # that wasn't ignored, and everything that matched was stat'ed
1112 # and is already in results.
1117 # and is already in results.
1113 # The rest must thus be ignored or under a symlink.
1118 # The rest must thus be ignored or under a symlink.
1114 audit_path = pathutil.pathauditor(self._root, cached=True)
1119 audit_path = pathutil.pathauditor(self._root, cached=True)
1115
1120
1116 for nf in iter(visit):
1121 for nf in iter(visit):
1117 # If a stat for the same file was already added with a
1122 # If a stat for the same file was already added with a
1118 # different case, don't add one for this, since that would
1123 # different case, don't add one for this, since that would
1119 # make it appear as if the file exists under both names
1124 # make it appear as if the file exists under both names
1120 # on disk.
1125 # on disk.
1121 if (
1126 if (
1122 normalizefile
1127 normalizefile
1123 and normalizefile(nf, True, True) in results
1128 and normalizefile(nf, True, True) in results
1124 ):
1129 ):
1125 results[nf] = None
1130 results[nf] = None
1126 # Report ignored items in the dmap as long as they are not
1131 # Report ignored items in the dmap as long as they are not
1127 # under a symlink directory.
1132 # under a symlink directory.
1128 elif audit_path.check(nf):
1133 elif audit_path.check(nf):
1129 try:
1134 try:
1130 results[nf] = lstat(join(nf))
1135 results[nf] = lstat(join(nf))
1131 # file was just ignored, no links, and exists
1136 # file was just ignored, no links, and exists
1132 except OSError:
1137 except OSError:
1133 # file doesn't exist
1138 # file doesn't exist
1134 results[nf] = None
1139 results[nf] = None
1135 else:
1140 else:
1136 # It's either missing or under a symlink directory
1141 # It's either missing or under a symlink directory
1137 # which we in this case report as missing
1142 # which we in this case report as missing
1138 results[nf] = None
1143 results[nf] = None
1139 else:
1144 else:
1140 # We may not have walked the full directory tree above,
1145 # We may not have walked the full directory tree above,
1141 # so stat and check everything we missed.
1146 # so stat and check everything we missed.
1142 iv = iter(visit)
1147 iv = iter(visit)
1143 for st in util.statfiles([join(i) for i in visit]):
1148 for st in util.statfiles([join(i) for i in visit]):
1144 results[next(iv)] = st
1149 results[next(iv)] = st
1145 return results
1150 return results
1146
1151
    def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
        """Compute working-copy status through the Rust fast path.

        Delegates the walk and comparison work to ``rustmod.status`` and
        repackages its output as the ``(lookup, scmutil.status)`` pair that
        ``status()`` expects.  Callers are expected to handle
        ``rustmod.FallbackError`` if the Rust side cannot service the
        request.
        """
        # Force Rayon (Rust parallelism library) to respect the number of
        # workers. This is a temporary workaround until Rust code knows
        # how to read the config file.
        numcpus = self._ui.configint(b"worker", b"numcpus")
        if numcpus is not None:
            encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)

        workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
        if not workers_enabled:
            # A single Rayon thread effectively disables parallelism.
            encoding.environ[b"RAYON_NUM_THREADS"] = b"1"

        (
            lookup,
            modified,
            added,
            removed,
            deleted,
            clean,
            ignored,
            unknown,
            warnings,
            bad,
            traversed,
            dirty,
        ) = rustmod.status(
            self._map._map,
            matcher,
            self._rootdir,
            self._ignorefiles(),
            self._checkexec,
            bool(list_clean),
            bool(list_ignored),
            bool(list_unknown),
            bool(matcher.traversedir),
        )

        # Propagate any map mutation the Rust side reported so the dirstate
        # gets written back out.
        self._dirty |= dirty

        if matcher.traversedir:
            # Replay the directories the Rust walk visited for matchers that
            # registered a traversedir callback.
            for dir in traversed:
                matcher.traversedir(dir)

        # NOTE(review): ``self._ui.warn`` is an attribute lookup (a bound
        # method), so this guard is presumably always true -- confirm intent.
        if self._ui.warn:
            for item in warnings:
                if isinstance(item, tuple):
                    # (file_path, syntax) pair: an ignore file contained an
                    # invalid syntax directive.
                    file_path, syntax = item
                    msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
                        file_path,
                        syntax,
                    )
                    self._ui.warn(msg)
                else:
                    # Bare path: a pattern file could not be read.
                    msg = _(b"skipping unreadable pattern file '%s': %s\n")
                    self._ui.warn(
                        msg
                        % (
                            pathutil.canonpath(
                                self._rootdir, self._rootdir, item
                            ),
                            b"No such file or directory",
                        )
                    )

        # Forward per-file errors to the matcher's bad-file callback.
        for (fn, message) in bad:
            matcher.bad(fn, encoding.strtolocal(message))

        status = scmutil.status(
            modified=modified,
            added=added,
            removed=removed,
            deleted=deleted,
            unknown=unknown,
            ignored=ignored,
            clean=clean,
        )
        return (lookup, status)
1224
1229
    def status(self, match, subrepos, ignored, clean, unknown):
        """Determine the status of the working copy relative to the
        dirstate and return a tuple of (unsure, status, mtime_boundary),
        where status is of type scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        """
        # Keep the caller-supplied flags under distinct names; the plain
        # names are reused below as the result accumulator lists.
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        dmap = self._map
        dmap.preload()

        # Prefer the Rust implementation when available; fall back to the
        # pure-Python walk below whenever an unsupported feature is in play.
        use_rust = True

        allowed_matchers = (
            matchmod.alwaysmatcher,
            matchmod.exactmatcher,
            matchmod.includematcher,
            matchmod.intersectionmatcher,
            matchmod.nevermatcher,
            matchmod.unionmatcher,
        )

        if rustmod is None:
            use_rust = False
        elif self._checkcase:
            # Case-insensitive filesystems are not handled yet
            use_rust = False
        elif subrepos:
            use_rust = False
        elif self._sparsematchfn is not None:
            use_rust = False
        elif not isinstance(match, allowed_matchers):
            # Some matchers have yet to be implemented
            use_rust = False

        # Get the time from the filesystem so we can disambiguate files that
        # appear modified in the present or future.
        try:
            mtime_boundary = timestamp.get_fs_now(self._opener)
        except OSError:
            # In largefiles or readonly context
            mtime_boundary = None

        if use_rust:
            try:
                res = self._rust_status(
                    match, listclean, listignored, listunknown
                )
                return res + (mtime_boundary,)
            except rustmod.FallbackError:
                # The Rust path declined; continue with the Python path.
                pass

        def noop(f):
            # Sink used for result categories the caller did not ask for.
            pass

        # Bind hot lookups/appends to locals for the tight loop below.
        dcontains = dmap.__contains__
        dget = dmap.__getitem__
        ladd = lookup.append  # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append if listunknown else noop
        iadd = ignored.append if listignored else noop
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append if listclean else noop
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        checklink = self._checklink
        copymap = self._map.copymap

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(
            match, subrepos, listunknown, listignored, full=full
        ).items():
            if not dcontains(fn):
                # File on disk but not in the dirstate: ignored or unknown.
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            t = dget(fn)
            mode = t.mode
            size = t.size

            if not st and t.tracked:
                # Tracked but no stat result: the file is gone from disk.
                dadd(fn)
            elif t.p2_info:
                madd(fn)
            elif t.added:
                aadd(fn)
            elif t.removed:
                radd(fn)
            elif t.tracked:
                if not checklink and t.has_fallback_symlink:
                    # If the file system does not support symlink, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif not checkexec and t.has_fallback_exec:
                    # If the file system does not support exec bits, the mode
                    # might not be correctly stored in the dirstate, so do not
                    # trust it.
                    ladd(fn)
                elif (
                    size >= 0
                    and (
                        (size != st.st_size and size != st.st_size & _rangemask)
                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
                    )
                    or fn in copymap
                ):
                    if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                        # issue6456: Size returned may be longer due to
                        # encryption on EXT-4 fscrypt, undecided.
                        ladd(fn)
                    else:
                        madd(fn)
                elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                    # There might be a change in the future if for example the
                    # internal clock is off, but this is a case where the issues
                    # the user would face would be a lot worse and there is
                    # nothing we can really do.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
        status = scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )
        return (lookup, status, mtime_boundary)
1373
1378
1374 def matches(self, match):
1379 def matches(self, match):
1375 """
1380 """
1376 return files in the dirstate (in whatever state) filtered by match
1381 return files in the dirstate (in whatever state) filtered by match
1377 """
1382 """
1378 dmap = self._map
1383 dmap = self._map
1379 if rustmod is not None:
1384 if rustmod is not None:
1380 dmap = self._map._map
1385 dmap = self._map._map
1381
1386
1382 if match.always():
1387 if match.always():
1383 return dmap.keys()
1388 return dmap.keys()
1384 files = match.files()
1389 files = match.files()
1385 if match.isexact():
1390 if match.isexact():
1386 # fast path -- filter the other way around, since typically files is
1391 # fast path -- filter the other way around, since typically files is
1387 # much smaller than dmap
1392 # much smaller than dmap
1388 return [f for f in files if f in dmap]
1393 return [f for f in files if f in dmap]
1389 if match.prefix() and all(fn in dmap for fn in files):
1394 if match.prefix() and all(fn in dmap for fn in files):
1390 # fast path -- all the values are known to be files, so just return
1395 # fast path -- all the values are known to be files, so just return
1391 # that
1396 # that
1392 return list(files)
1397 return list(files)
1393 return [f for f in dmap if match(f)]
1398 return [f for f in dmap if match(f)]
1394
1399
1395 def _actualfilename(self, tr):
1400 def _actualfilename(self, tr):
1396 if tr:
1401 if tr:
1397 return self._pendingfilename
1402 return self._pendingfilename
1398 else:
1403 else:
1399 return self._filename
1404 return self._filename
1400
1405
    def savebackup(self, tr, backupname):
        '''Save current dirstate into backup file

        Pending in-memory changes are flushed to the (possibly pending)
        dirstate file first, which is then hardlinked to ``backupname``.
        '''
        filename = self._actualfilename(tr)
        assert backupname != filename

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        if self._dirty or not self._opener.exists(filename):
            self._writedirstate(
                tr,
                self._opener(filename, b"w", atomictemp=True, checkambig=True),
            )

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator(
                b'dirstate-1-main',
                (self._filename,),
                lambda f: self._writedirstate(tr, f),
                location=b'plain',
                post_finalize=True,
            )

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location=b'plain')

        # Remove any stale backup before creating the new one.
        self._opener.tryunlink(backupname)
        # hardlink backup is okay because _writedirstate is always called
        # with an "atomictemp=True" file.
        util.copyfile(
            self._opener.join(filename),
            self._opener.join(backupname),
            hardlink=True,
        )
1440
1445
1441 def restorebackup(self, tr, backupname):
1446 def restorebackup(self, tr, backupname):
1442 '''Restore dirstate by backup file'''
1447 '''Restore dirstate by backup file'''
1443 # this "invalidate()" prevents "wlock.release()" from writing
1448 # this "invalidate()" prevents "wlock.release()" from writing
1444 # changes of dirstate out after restoring from backup file
1449 # changes of dirstate out after restoring from backup file
1445 self.invalidate()
1450 self.invalidate()
1446 filename = self._actualfilename(tr)
1451 filename = self._actualfilename(tr)
1447 o = self._opener
1452 o = self._opener
1448 if util.samefile(o.join(backupname), o.join(filename)):
1453 if util.samefile(o.join(backupname), o.join(filename)):
1449 o.unlink(backupname)
1454 o.unlink(backupname)
1450 else:
1455 else:
1451 o.rename(backupname, filename, checkambig=True)
1456 o.rename(backupname, filename, checkambig=True)
1452
1457
    def clearbackup(self, tr, backupname):
        '''Clear backup file'''
        # ``tr`` is unused here but kept so the signature mirrors
        # savebackup() and restorebackup().
        self._opener.unlink(backupname)
1456
1461
1457 def verify(self, m1, m2):
1462 def verify(self, m1, m2):
1458 """check the dirstate content again the parent manifest and yield errors"""
1463 """check the dirstate content again the parent manifest and yield errors"""
1459 missing_from_p1 = b"%s in state %s, but not in manifest1\n"
1464 missing_from_p1 = b"%s in state %s, but not in manifest1\n"
1460 unexpected_in_p1 = b"%s in state %s, but also in manifest1\n"
1465 unexpected_in_p1 = b"%s in state %s, but also in manifest1\n"
1461 missing_from_ps = b"%s in state %s, but not in either manifest\n"
1466 missing_from_ps = b"%s in state %s, but not in either manifest\n"
1462 missing_from_ds = b"%s in manifest1, but listed as state %s\n"
1467 missing_from_ds = b"%s in manifest1, but listed as state %s\n"
1463 for f, entry in self.items():
1468 for f, entry in self.items():
1464 state = entry.state
1469 state = entry.state
1465 if state in b"nr" and f not in m1:
1470 if state in b"nr" and f not in m1:
1466 yield (missing_from_p1, f, state)
1471 yield (missing_from_p1, f, state)
1467 if state in b"a" and f in m1:
1472 if state in b"a" and f in m1:
1468 yield (unexpected_in_p1, f, state)
1473 yield (unexpected_in_p1, f, state)
1469 if state in b"m" and f not in m1 and f not in m2:
1474 if state in b"m" and f not in m1 and f not in m2:
1470 yield (missing_from_ps, f, state)
1475 yield (missing_from_ps, f, state)
1471 for f in m1:
1476 for f in m1:
1472 state = self.get_entry(f).state
1477 state = self.get_entry(f).state
1473 if state not in b"nrm":
1478 if state not in b"nrm":
1474 yield (missing_from_ds, f, state)
1479 yield (missing_from_ds, f, state)
General Comments 0
You need to be logged in to leave comments. Login now