##// END OF EJS Templates
sparse: directly inline the `set_tracked` and `copy` wrapping...
marmoute -
r50255:bd3519dc default
parent child Browse files
Show More
@@ -1,429 +1,393 b''
1 # sparse.py - allow sparse checkouts of the working directory
1 # sparse.py - allow sparse checkouts of the working directory
2 #
2 #
3 # Copyright 2014 Facebook, Inc.
3 # Copyright 2014 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """allow sparse checkouts of the working directory (EXPERIMENTAL)
8 """allow sparse checkouts of the working directory (EXPERIMENTAL)
9
9
10 (This extension is not yet protected by backwards compatibility
10 (This extension is not yet protected by backwards compatibility
11 guarantees. Any aspect may break in future releases until this
11 guarantees. Any aspect may break in future releases until this
12 notice is removed.)
12 notice is removed.)
13
13
14 This extension allows the working directory to only consist of a
14 This extension allows the working directory to only consist of a
15 subset of files for the revision. This allows specific files or
15 subset of files for the revision. This allows specific files or
16 directories to be explicitly included or excluded. Many repository
16 directories to be explicitly included or excluded. Many repository
17 operations have performance proportional to the number of files in
17 operations have performance proportional to the number of files in
18 the working directory. So only realizing a subset of files in the
18 the working directory. So only realizing a subset of files in the
19 working directory can improve performance.
19 working directory can improve performance.
20
20
21 Sparse Config Files
21 Sparse Config Files
22 -------------------
22 -------------------
23
23
24 The set of files that are part of a sparse checkout are defined by
24 The set of files that are part of a sparse checkout are defined by
25 a sparse config file. The file defines 3 things: includes (files to
25 a sparse config file. The file defines 3 things: includes (files to
26 include in the sparse checkout), excludes (files to exclude from the
26 include in the sparse checkout), excludes (files to exclude from the
27 sparse checkout), and profiles (links to other config files).
27 sparse checkout), and profiles (links to other config files).
28
28
29 The file format is newline delimited. Empty lines and lines beginning
29 The file format is newline delimited. Empty lines and lines beginning
30 with ``#`` are ignored.
30 with ``#`` are ignored.
31
31
32 Lines beginning with ``%include `` denote another sparse config file
32 Lines beginning with ``%include `` denote another sparse config file
33 to include. e.g. ``%include tests.sparse``. The filename is relative
33 to include. e.g. ``%include tests.sparse``. The filename is relative
34 to the repository root.
34 to the repository root.
35
35
36 The special lines ``[include]`` and ``[exclude]`` denote the section
36 The special lines ``[include]`` and ``[exclude]`` denote the section
37 for includes and excludes that follow, respectively. It is illegal to
37 for includes and excludes that follow, respectively. It is illegal to
38 have ``[include]`` after ``[exclude]``.
38 have ``[include]`` after ``[exclude]``.
39
39
40 Non-special lines resemble file patterns to be added to either includes
40 Non-special lines resemble file patterns to be added to either includes
41 or excludes. The syntax of these lines is documented by :hg:`help patterns`.
41 or excludes. The syntax of these lines is documented by :hg:`help patterns`.
42 Patterns are interpreted as ``glob:`` by default and match against the
42 Patterns are interpreted as ``glob:`` by default and match against the
43 root of the repository.
43 root of the repository.
44
44
45 Exclusion patterns take precedence over inclusion patterns. So even
45 Exclusion patterns take precedence over inclusion patterns. So even
46 if a file is explicitly included, an ``[exclude]`` entry can remove it.
46 if a file is explicitly included, an ``[exclude]`` entry can remove it.
47
47
48 For example, say you have a repository with 3 directories, ``frontend/``,
48 For example, say you have a repository with 3 directories, ``frontend/``,
49 ``backend/``, and ``tools/``. ``frontend/`` and ``backend/`` correspond
49 ``backend/``, and ``tools/``. ``frontend/`` and ``backend/`` correspond
50 to different projects and it is uncommon for someone working on one
50 to different projects and it is uncommon for someone working on one
51 to need the files for the other. But ``tools/`` contains files shared
51 to need the files for the other. But ``tools/`` contains files shared
52 between both projects. Your sparse config files may resemble::
52 between both projects. Your sparse config files may resemble::
53
53
54 # frontend.sparse
54 # frontend.sparse
55 frontend/**
55 frontend/**
56 tools/**
56 tools/**
57
57
58 # backend.sparse
58 # backend.sparse
59 backend/**
59 backend/**
60 tools/**
60 tools/**
61
61
62 Say the backend grows in size. Or there's a directory with thousands
62 Say the backend grows in size. Or there's a directory with thousands
63 of files you wish to exclude. You can modify the profile to exclude
63 of files you wish to exclude. You can modify the profile to exclude
64 certain files::
64 certain files::
65
65
66 [include]
66 [include]
67 backend/**
67 backend/**
68 tools/**
68 tools/**
69
69
70 [exclude]
70 [exclude]
71 tools/tests/**
71 tools/tests/**
72 """
72 """
73
73
74
74
75 from mercurial.i18n import _
75 from mercurial.i18n import _
76 from mercurial.pycompat import setattr
76 from mercurial.pycompat import setattr
77 from mercurial import (
77 from mercurial import (
78 cmdutil,
78 cmdutil,
79 commands,
79 commands,
80 dirstate,
81 error,
80 error,
82 extensions,
81 extensions,
83 logcmdutil,
82 logcmdutil,
84 merge as mergemod,
83 merge as mergemod,
85 pycompat,
84 pycompat,
86 registrar,
85 registrar,
87 sparse,
86 sparse,
88 util,
87 util,
89 )
88 )
90
89
91 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
90 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
92 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
91 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
93 # be specifying the version(s) of Mercurial they are tested with, or
92 # be specifying the version(s) of Mercurial they are tested with, or
94 # leave the attribute unspecified.
93 # leave the attribute unspecified.
95 testedwith = b'ships-with-hg-core'
94 testedwith = b'ships-with-hg-core'
96
95
97 cmdtable = {}
96 cmdtable = {}
98 command = registrar.command(cmdtable)
97 command = registrar.command(cmdtable)
99
98
100
99
101 def extsetup(ui):
100 def extsetup(ui):
102 sparse.enabled = True
101 sparse.enabled = True
103
102
104 _setupclone(ui)
103 _setupclone(ui)
105 _setuplog(ui)
104 _setuplog(ui)
106 _setupadd(ui)
105 _setupadd(ui)
107 _setupdirstate(ui)
108
106
109
107
110 def replacefilecache(cls, propname, replacement):
108 def replacefilecache(cls, propname, replacement):
111 """Replace a filecache property with a new class. This allows changing the
109 """Replace a filecache property with a new class. This allows changing the
112 cache invalidation condition."""
110 cache invalidation condition."""
113 origcls = cls
111 origcls = cls
114 assert callable(replacement)
112 assert callable(replacement)
115 while cls is not object:
113 while cls is not object:
116 if propname in cls.__dict__:
114 if propname in cls.__dict__:
117 orig = cls.__dict__[propname]
115 orig = cls.__dict__[propname]
118 setattr(cls, propname, replacement(orig))
116 setattr(cls, propname, replacement(orig))
119 break
117 break
120 cls = cls.__bases__[0]
118 cls = cls.__bases__[0]
121
119
122 if cls is object:
120 if cls is object:
123 raise AttributeError(
121 raise AttributeError(
124 _(b"type '%s' has no property '%s'") % (origcls, propname)
122 _(b"type '%s' has no property '%s'") % (origcls, propname)
125 )
123 )
126
124
127
125
128 def _setuplog(ui):
126 def _setuplog(ui):
129 entry = commands.table[b'log|history']
127 entry = commands.table[b'log|history']
130 entry[1].append(
128 entry[1].append(
131 (
129 (
132 b'',
130 b'',
133 b'sparse',
131 b'sparse',
134 None,
132 None,
135 b"limit to changesets affecting the sparse checkout",
133 b"limit to changesets affecting the sparse checkout",
136 )
134 )
137 )
135 )
138
136
139 def _initialrevs(orig, repo, wopts):
137 def _initialrevs(orig, repo, wopts):
140 revs = orig(repo, wopts)
138 revs = orig(repo, wopts)
141 if wopts.opts.get(b'sparse'):
139 if wopts.opts.get(b'sparse'):
142 sparsematch = sparse.matcher(repo)
140 sparsematch = sparse.matcher(repo)
143
141
144 def ctxmatch(rev):
142 def ctxmatch(rev):
145 ctx = repo[rev]
143 ctx = repo[rev]
146 return any(f for f in ctx.files() if sparsematch(f))
144 return any(f for f in ctx.files() if sparsematch(f))
147
145
148 revs = revs.filter(ctxmatch)
146 revs = revs.filter(ctxmatch)
149 return revs
147 return revs
150
148
151 extensions.wrapfunction(logcmdutil, b'_initialrevs', _initialrevs)
149 extensions.wrapfunction(logcmdutil, b'_initialrevs', _initialrevs)
152
150
153
151
154 def _clonesparsecmd(orig, ui, repo, *args, **opts):
152 def _clonesparsecmd(orig, ui, repo, *args, **opts):
155 include = opts.get('include')
153 include = opts.get('include')
156 exclude = opts.get('exclude')
154 exclude = opts.get('exclude')
157 enableprofile = opts.get('enable_profile')
155 enableprofile = opts.get('enable_profile')
158 narrow_pat = opts.get('narrow')
156 narrow_pat = opts.get('narrow')
159
157
160 # if --narrow is passed, it means they are includes and excludes for narrow
158 # if --narrow is passed, it means they are includes and excludes for narrow
161 # clone
159 # clone
162 if not narrow_pat and (include or exclude or enableprofile):
160 if not narrow_pat and (include or exclude or enableprofile):
163
161
164 def clonesparse(orig, ctx, *args, **kwargs):
162 def clonesparse(orig, ctx, *args, **kwargs):
165 sparse.updateconfig(
163 sparse.updateconfig(
166 ctx.repo().unfiltered(),
164 ctx.repo().unfiltered(),
167 {},
165 {},
168 include=include,
166 include=include,
169 exclude=exclude,
167 exclude=exclude,
170 enableprofile=enableprofile,
168 enableprofile=enableprofile,
171 usereporootpaths=True,
169 usereporootpaths=True,
172 )
170 )
173 return orig(ctx, *args, **kwargs)
171 return orig(ctx, *args, **kwargs)
174
172
175 extensions.wrapfunction(mergemod, b'update', clonesparse)
173 extensions.wrapfunction(mergemod, b'update', clonesparse)
176 return orig(ui, repo, *args, **opts)
174 return orig(ui, repo, *args, **opts)
177
175
178
176
179 def _setupclone(ui):
177 def _setupclone(ui):
180 entry = commands.table[b'clone']
178 entry = commands.table[b'clone']
181 entry[1].append((b'', b'enable-profile', [], b'enable a sparse profile'))
179 entry[1].append((b'', b'enable-profile', [], b'enable a sparse profile'))
182 entry[1].append((b'', b'include', [], b'include sparse pattern'))
180 entry[1].append((b'', b'include', [], b'include sparse pattern'))
183 entry[1].append((b'', b'exclude', [], b'exclude sparse pattern'))
181 entry[1].append((b'', b'exclude', [], b'exclude sparse pattern'))
184 extensions.wrapcommand(commands.table, b'clone', _clonesparsecmd)
182 extensions.wrapcommand(commands.table, b'clone', _clonesparsecmd)
185
183
186
184
187 def _setupadd(ui):
185 def _setupadd(ui):
188 entry = commands.table[b'add']
186 entry = commands.table[b'add']
189 entry[1].append(
187 entry[1].append(
190 (
188 (
191 b's',
189 b's',
192 b'sparse',
190 b'sparse',
193 None,
191 None,
194 b'also include directories of added files in sparse config',
192 b'also include directories of added files in sparse config',
195 )
193 )
196 )
194 )
197
195
198 def _add(orig, ui, repo, *pats, **opts):
196 def _add(orig, ui, repo, *pats, **opts):
199 if opts.get('sparse'):
197 if opts.get('sparse'):
200 dirs = set()
198 dirs = set()
201 for pat in pats:
199 for pat in pats:
202 dirname, basename = util.split(pat)
200 dirname, basename = util.split(pat)
203 dirs.add(dirname)
201 dirs.add(dirname)
204 sparse.updateconfig(repo, opts, include=list(dirs))
202 sparse.updateconfig(repo, opts, include=list(dirs))
205 return orig(ui, repo, *pats, **opts)
203 return orig(ui, repo, *pats, **opts)
206
204
207 extensions.wrapcommand(commands.table, b'add', _add)
205 extensions.wrapcommand(commands.table, b'add', _add)
208
206
209
207
210 def _setupdirstate(ui):
211 """Modify the dirstate to prevent stat'ing excluded files,
212 and to prevent modifications to files outside the checkout.
213 """
214
215 # Prevent adding files that are outside the sparse checkout
216 editfuncs = [
217 b'set_tracked',
218 b'copy',
219 ]
220 hint = _(
221 b'include file with `hg debugsparse --include <pattern>` or use '
222 + b'`hg add -s <file>` to include file directory while adding'
223 )
224 for func in editfuncs:
225
226 def _wrapper(orig, self, *args, **kwargs):
227 sparsematch = self._sparsematcher
228 if sparsematch is not None and not sparsematch.always():
229 for f in args:
230 if f is not None and not sparsematch(f) and f not in self:
231 raise error.Abort(
232 _(
233 b"cannot add '%s' - it is outside "
234 b"the sparse checkout"
235 )
236 % f,
237 hint=hint,
238 )
239 return orig(self, *args, **kwargs)
240
241 extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
242
243
244 @command(
208 @command(
245 b'debugsparse',
209 b'debugsparse',
246 [
210 [
247 (
211 (
248 b'I',
212 b'I',
249 b'include',
213 b'include',
250 [],
214 [],
251 _(b'include files in the sparse checkout'),
215 _(b'include files in the sparse checkout'),
252 _(b'PATTERN'),
216 _(b'PATTERN'),
253 ),
217 ),
254 (
218 (
255 b'X',
219 b'X',
256 b'exclude',
220 b'exclude',
257 [],
221 [],
258 _(b'exclude files in the sparse checkout'),
222 _(b'exclude files in the sparse checkout'),
259 _(b'PATTERN'),
223 _(b'PATTERN'),
260 ),
224 ),
261 (
225 (
262 b'd',
226 b'd',
263 b'delete',
227 b'delete',
264 [],
228 [],
265 _(b'delete an include/exclude rule'),
229 _(b'delete an include/exclude rule'),
266 _(b'PATTERN'),
230 _(b'PATTERN'),
267 ),
231 ),
268 (
232 (
269 b'f',
233 b'f',
270 b'force',
234 b'force',
271 False,
235 False,
272 _(b'allow changing rules even with pending changes'),
236 _(b'allow changing rules even with pending changes'),
273 ),
237 ),
274 (
238 (
275 b'',
239 b'',
276 b'enable-profile',
240 b'enable-profile',
277 [],
241 [],
278 _(b'enables the specified profile'),
242 _(b'enables the specified profile'),
279 _(b'PATTERN'),
243 _(b'PATTERN'),
280 ),
244 ),
281 (
245 (
282 b'',
246 b'',
283 b'disable-profile',
247 b'disable-profile',
284 [],
248 [],
285 _(b'disables the specified profile'),
249 _(b'disables the specified profile'),
286 _(b'PATTERN'),
250 _(b'PATTERN'),
287 ),
251 ),
288 (
252 (
289 b'',
253 b'',
290 b'import-rules',
254 b'import-rules',
291 [],
255 [],
292 _(b'imports rules from a file'),
256 _(b'imports rules from a file'),
293 _(b'PATTERN'),
257 _(b'PATTERN'),
294 ),
258 ),
295 (b'', b'clear-rules', False, _(b'clears local include/exclude rules')),
259 (b'', b'clear-rules', False, _(b'clears local include/exclude rules')),
296 (
260 (
297 b'',
261 b'',
298 b'refresh',
262 b'refresh',
299 False,
263 False,
300 _(b'updates the working after sparseness changes'),
264 _(b'updates the working after sparseness changes'),
301 ),
265 ),
302 (b'', b'reset', False, _(b'makes the repo full again')),
266 (b'', b'reset', False, _(b'makes the repo full again')),
303 ]
267 ]
304 + commands.templateopts,
268 + commands.templateopts,
305 _(b'[--OPTION]'),
269 _(b'[--OPTION]'),
306 helpbasic=True,
270 helpbasic=True,
307 )
271 )
308 def debugsparse(ui, repo, **opts):
272 def debugsparse(ui, repo, **opts):
309 """make the current checkout sparse, or edit the existing checkout
273 """make the current checkout sparse, or edit the existing checkout
310
274
311 The sparse command is used to make the current checkout sparse.
275 The sparse command is used to make the current checkout sparse.
312 This means files that don't meet the sparse condition will not be
276 This means files that don't meet the sparse condition will not be
313 written to disk, or show up in any working copy operations. It does
277 written to disk, or show up in any working copy operations. It does
314 not affect files in history in any way.
278 not affect files in history in any way.
315
279
316 Passing no arguments prints the currently applied sparse rules.
280 Passing no arguments prints the currently applied sparse rules.
317
281
318 --include and --exclude are used to add and remove files from the sparse
282 --include and --exclude are used to add and remove files from the sparse
319 checkout. The effects of adding an include or exclude rule are applied
283 checkout. The effects of adding an include or exclude rule are applied
320 immediately. If applying the new rule would cause a file with pending
284 immediately. If applying the new rule would cause a file with pending
321 changes to be added or removed, the command will fail. Pass --force to
285 changes to be added or removed, the command will fail. Pass --force to
322 force a rule change even with pending changes (the changes on disk will
286 force a rule change even with pending changes (the changes on disk will
323 be preserved).
287 be preserved).
324
288
325 --delete removes an existing include/exclude rule. The effects are
289 --delete removes an existing include/exclude rule. The effects are
326 immediate.
290 immediate.
327
291
328 --refresh refreshes the files on disk based on the sparse rules. This is
292 --refresh refreshes the files on disk based on the sparse rules. This is
329 only necessary if .hg/sparse was changed by hand.
293 only necessary if .hg/sparse was changed by hand.
330
294
331 --enable-profile and --disable-profile accept a path to a .hgsparse file.
295 --enable-profile and --disable-profile accept a path to a .hgsparse file.
332 This allows defining sparse checkouts and tracking them inside the
296 This allows defining sparse checkouts and tracking them inside the
333 repository. This is useful for defining commonly used sparse checkouts for
297 repository. This is useful for defining commonly used sparse checkouts for
334 many people to use. As the profile definition changes over time, the sparse
298 many people to use. As the profile definition changes over time, the sparse
335 checkout will automatically be updated appropriately, depending on which
299 checkout will automatically be updated appropriately, depending on which
336 changeset is checked out. Changes to .hgsparse are not applied until they
300 changeset is checked out. Changes to .hgsparse are not applied until they
337 have been committed.
301 have been committed.
338
302
339 --import-rules accepts a path to a file containing rules in the .hgsparse
303 --import-rules accepts a path to a file containing rules in the .hgsparse
340 format, allowing you to add --include, --exclude and --enable-profile rules
304 format, allowing you to add --include, --exclude and --enable-profile rules
341 in bulk. Like the --include, --exclude and --enable-profile switches, the
305 in bulk. Like the --include, --exclude and --enable-profile switches, the
342 changes are applied immediately.
306 changes are applied immediately.
343
307
344 --clear-rules removes all local include and exclude rules, while leaving
308 --clear-rules removes all local include and exclude rules, while leaving
345 any enabled profiles in place.
309 any enabled profiles in place.
346
310
347 Returns 0 if editing the sparse checkout succeeds.
311 Returns 0 if editing the sparse checkout succeeds.
348 """
312 """
349 opts = pycompat.byteskwargs(opts)
313 opts = pycompat.byteskwargs(opts)
350 include = opts.get(b'include')
314 include = opts.get(b'include')
351 exclude = opts.get(b'exclude')
315 exclude = opts.get(b'exclude')
352 force = opts.get(b'force')
316 force = opts.get(b'force')
353 enableprofile = opts.get(b'enable_profile')
317 enableprofile = opts.get(b'enable_profile')
354 disableprofile = opts.get(b'disable_profile')
318 disableprofile = opts.get(b'disable_profile')
355 importrules = opts.get(b'import_rules')
319 importrules = opts.get(b'import_rules')
356 clearrules = opts.get(b'clear_rules')
320 clearrules = opts.get(b'clear_rules')
357 delete = opts.get(b'delete')
321 delete = opts.get(b'delete')
358 refresh = opts.get(b'refresh')
322 refresh = opts.get(b'refresh')
359 reset = opts.get(b'reset')
323 reset = opts.get(b'reset')
360 action = cmdutil.check_at_most_one_arg(
324 action = cmdutil.check_at_most_one_arg(
361 opts, b'import_rules', b'clear_rules', b'refresh'
325 opts, b'import_rules', b'clear_rules', b'refresh'
362 )
326 )
363 updateconfig = bool(
327 updateconfig = bool(
364 include or exclude or delete or reset or enableprofile or disableprofile
328 include or exclude or delete or reset or enableprofile or disableprofile
365 )
329 )
366 count = sum([updateconfig, bool(action)])
330 count = sum([updateconfig, bool(action)])
367 if count > 1:
331 if count > 1:
368 raise error.Abort(_(b"too many flags specified"))
332 raise error.Abort(_(b"too many flags specified"))
369
333
370 # enable sparse on repo even if the requirements is missing.
334 # enable sparse on repo even if the requirements is missing.
371 repo._has_sparse = True
335 repo._has_sparse = True
372
336
373 if count == 0:
337 if count == 0:
374 if repo.vfs.exists(b'sparse'):
338 if repo.vfs.exists(b'sparse'):
375 ui.status(repo.vfs.read(b"sparse") + b"\n")
339 ui.status(repo.vfs.read(b"sparse") + b"\n")
376 temporaryincludes = sparse.readtemporaryincludes(repo)
340 temporaryincludes = sparse.readtemporaryincludes(repo)
377 if temporaryincludes:
341 if temporaryincludes:
378 ui.status(
342 ui.status(
379 _(b"Temporarily Included Files (for merge/rebase):\n")
343 _(b"Temporarily Included Files (for merge/rebase):\n")
380 )
344 )
381 ui.status((b"\n".join(temporaryincludes) + b"\n"))
345 ui.status((b"\n".join(temporaryincludes) + b"\n"))
382 return
346 return
383 else:
347 else:
384 raise error.Abort(
348 raise error.Abort(
385 _(
349 _(
386 b'the debugsparse command is only supported on'
350 b'the debugsparse command is only supported on'
387 b' sparse repositories'
351 b' sparse repositories'
388 )
352 )
389 )
353 )
390
354
391 if updateconfig:
355 if updateconfig:
392 sparse.updateconfig(
356 sparse.updateconfig(
393 repo,
357 repo,
394 opts,
358 opts,
395 include=include,
359 include=include,
396 exclude=exclude,
360 exclude=exclude,
397 reset=reset,
361 reset=reset,
398 delete=delete,
362 delete=delete,
399 enableprofile=enableprofile,
363 enableprofile=enableprofile,
400 disableprofile=disableprofile,
364 disableprofile=disableprofile,
401 force=force,
365 force=force,
402 )
366 )
403
367
404 if importrules:
368 if importrules:
405 sparse.importfromfiles(repo, opts, importrules, force=force)
369 sparse.importfromfiles(repo, opts, importrules, force=force)
406
370
407 if clearrules:
371 if clearrules:
408 sparse.clearrules(repo, force=force)
372 sparse.clearrules(repo, force=force)
409
373
410 if refresh:
374 if refresh:
411 try:
375 try:
412 wlock = repo.wlock()
376 wlock = repo.wlock()
413 fcounts = map(
377 fcounts = map(
414 len,
378 len,
415 sparse.refreshwdir(
379 sparse.refreshwdir(
416 repo, repo.status(), sparse.matcher(repo), force=force
380 repo, repo.status(), sparse.matcher(repo), force=force
417 ),
381 ),
418 )
382 )
419 sparse.printchanges(
383 sparse.printchanges(
420 ui,
384 ui,
421 opts,
385 opts,
422 added=fcounts[0],
386 added=fcounts[0],
423 dropped=fcounts[1],
387 dropped=fcounts[1],
424 conflicting=fcounts[2],
388 conflicting=fcounts[2],
425 )
389 )
426 finally:
390 finally:
427 wlock.release()
391 wlock.release()
428
392
429 del repo._has_sparse
393 del repo._has_sparse
@@ -1,1493 +1,1507 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import collections
9 import collections
10 import contextlib
10 import contextlib
11 import os
11 import os
12 import stat
12 import stat
13 import uuid
13 import uuid
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import delattr
16 from .pycompat import delattr
17
17
18 from hgdemandimport import tracing
18 from hgdemandimport import tracing
19
19
20 from . import (
20 from . import (
21 dirstatemap,
21 dirstatemap,
22 encoding,
22 encoding,
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 policy,
27 policy,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from .dirstateutils import (
33 from .dirstateutils import (
34 timestamp,
34 timestamp,
35 )
35 )
36
36
37 from .interfaces import (
37 from .interfaces import (
38 dirstate as intdirstate,
38 dirstate as intdirstate,
39 util as interfaceutil,
39 util as interfaceutil,
40 )
40 )
41
41
42 parsers = policy.importmod('parsers')
42 parsers = policy.importmod('parsers')
43 rustmod = policy.importrust('dirstate')
43 rustmod = policy.importrust('dirstate')
44
44
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
45 HAS_FAST_DIRSTATE_V2 = rustmod is not None
46
46
47 propertycache = util.propertycache
47 propertycache = util.propertycache
48 filecache = scmutil.filecache
48 filecache = scmutil.filecache
49 _rangemask = dirstatemap.rangemask
49 _rangemask = dirstatemap.rangemask
50
50
51 DirstateItem = dirstatemap.DirstateItem
51 DirstateItem = dirstatemap.DirstateItem
52
52
53
53
54 class repocache(filecache):
54 class repocache(filecache):
55 """filecache for files in .hg/"""
55 """filecache for files in .hg/"""
56
56
57 def join(self, obj, fname):
57 def join(self, obj, fname):
58 return obj._opener.join(fname)
58 return obj._opener.join(fname)
59
59
60
60
61 class rootcache(filecache):
61 class rootcache(filecache):
62 """filecache for files in the repository root"""
62 """filecache for files in the repository root"""
63
63
64 def join(self, obj, fname):
64 def join(self, obj, fname):
65 return obj._join(fname)
65 return obj._join(fname)
66
66
67
67
68 def requires_parents_change(func):
68 def requires_parents_change(func):
69 def wrap(self, *args, **kwargs):
69 def wrap(self, *args, **kwargs):
70 if not self.pendingparentchange():
70 if not self.pendingparentchange():
71 msg = 'calling `%s` outside of a parentchange context'
71 msg = 'calling `%s` outside of a parentchange context'
72 msg %= func.__name__
72 msg %= func.__name__
73 raise error.ProgrammingError(msg)
73 raise error.ProgrammingError(msg)
74 return func(self, *args, **kwargs)
74 return func(self, *args, **kwargs)
75
75
76 return wrap
76 return wrap
77
77
78
78
79 def requires_no_parents_change(func):
79 def requires_no_parents_change(func):
80 def wrap(self, *args, **kwargs):
80 def wrap(self, *args, **kwargs):
81 if self.pendingparentchange():
81 if self.pendingparentchange():
82 msg = 'calling `%s` inside of a parentchange context'
82 msg = 'calling `%s` inside of a parentchange context'
83 msg %= func.__name__
83 msg %= func.__name__
84 raise error.ProgrammingError(msg)
84 raise error.ProgrammingError(msg)
85 return func(self, *args, **kwargs)
85 return func(self, *args, **kwargs)
86
86
87 return wrap
87 return wrap
88
88
89
89
90 @interfaceutil.implementer(intdirstate.idirstate)
90 @interfaceutil.implementer(intdirstate.idirstate)
91 class dirstate:
91 class dirstate:
92 def __init__(
92 def __init__(
93 self,
93 self,
94 opener,
94 opener,
95 ui,
95 ui,
96 root,
96 root,
97 validate,
97 validate,
98 sparsematchfn,
98 sparsematchfn,
99 nodeconstants,
99 nodeconstants,
100 use_dirstate_v2,
100 use_dirstate_v2,
101 use_tracked_hint=False,
101 use_tracked_hint=False,
102 ):
102 ):
103 """Create a new dirstate object.
103 """Create a new dirstate object.
104
104
105 opener is an open()-like callable that can be used to open the
105 opener is an open()-like callable that can be used to open the
106 dirstate file; root is the root of the directory tracked by
106 dirstate file; root is the root of the directory tracked by
107 the dirstate.
107 the dirstate.
108 """
108 """
109 self._use_dirstate_v2 = use_dirstate_v2
109 self._use_dirstate_v2 = use_dirstate_v2
110 self._use_tracked_hint = use_tracked_hint
110 self._use_tracked_hint = use_tracked_hint
111 self._nodeconstants = nodeconstants
111 self._nodeconstants = nodeconstants
112 self._opener = opener
112 self._opener = opener
113 self._validate = validate
113 self._validate = validate
114 self._root = root
114 self._root = root
115 # Either build a sparse-matcher or None if sparse is disabled
115 # Either build a sparse-matcher or None if sparse is disabled
116 self._sparsematchfn = sparsematchfn
116 self._sparsematchfn = sparsematchfn
117 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
117 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
118 # UNC path pointing to root share (issue4557)
118 # UNC path pointing to root share (issue4557)
119 self._rootdir = pathutil.normasprefix(root)
119 self._rootdir = pathutil.normasprefix(root)
120 # True is any internal state may be different
120 # True is any internal state may be different
121 self._dirty = False
121 self._dirty = False
122 # True if the set of tracked file may be different
122 # True if the set of tracked file may be different
123 self._dirty_tracked_set = False
123 self._dirty_tracked_set = False
124 self._ui = ui
124 self._ui = ui
125 self._filecache = {}
125 self._filecache = {}
126 self._parentwriters = 0
126 self._parentwriters = 0
127 self._filename = b'dirstate'
127 self._filename = b'dirstate'
128 self._filename_th = b'dirstate-tracked-hint'
128 self._filename_th = b'dirstate-tracked-hint'
129 self._pendingfilename = b'%s.pending' % self._filename
129 self._pendingfilename = b'%s.pending' % self._filename
130 self._plchangecallbacks = {}
130 self._plchangecallbacks = {}
131 self._origpl = None
131 self._origpl = None
132 self._mapcls = dirstatemap.dirstatemap
132 self._mapcls = dirstatemap.dirstatemap
133 # Access and cache cwd early, so we don't access it for the first time
133 # Access and cache cwd early, so we don't access it for the first time
134 # after a working-copy update caused it to not exist (accessing it then
134 # after a working-copy update caused it to not exist (accessing it then
135 # raises an exception).
135 # raises an exception).
136 self._cwd
136 self._cwd
137
137
    def prefetch_parents(self):
        """make sure the parents are loaded

        Used to avoid a race condition.
        """
        # merely touching the property forces the parents to be read now
        self._pl

    @contextlib.contextmanager
    def parentchange(self):
        """Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        """
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1

    def pendingparentchange(self):
        """Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        """
        # non-zero while inside one or more nested parentchange() contexts
        return self._parentwriters > 0
167
167
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # Assigning to self._map overwrites the propertycache slot, so
        # subsequent accesses hit the instance attribute directly.
        self._map = self._mapcls(
            self._ui,
            self._opener,
            self._root,
            self._nodeconstants,
            self._use_dirstate_v2,
        )
        return self._map
179
179
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.

        When sparse is disabled, return None.
        """
        if self._sparsematchfn is None:
            return None
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        return self._sparsematchfn()
196
196
197 @repocache(b'branch')
197 @repocache(b'branch')
198 def _branch(self):
198 def _branch(self):
199 try:
199 try:
200 return self._opener.read(b"branch").strip() or b"default"
200 return self._opener.read(b"branch").strip() or b"default"
201 except FileNotFoundError:
201 except FileNotFoundError:
202 return b"default"
202 return b"default"
203
203
    @property
    def _pl(self):
        """The two parent nodeids of the working directory."""
        return self._map.parents()

    def hasdir(self, d):
        """Return True if directory `d` contains at least one tracked file."""
        return self._map.hastrackeddir(d)
210
210
211 @rootcache(b'.hgignore')
211 @rootcache(b'.hgignore')
212 def _ignore(self):
212 def _ignore(self):
213 files = self._ignorefiles()
213 files = self._ignorefiles()
214 if not files:
214 if not files:
215 return matchmod.never()
215 return matchmod.never()
216
216
217 pats = [b'include:%s' % f for f in files]
217 pats = [b'include:%s' % f for f in files]
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
218 return matchmod.match(self._root, b'', [], pats, warn=self._ui.warn)
219
219
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' even though the OS
        # separator differs (ui.slash option)
        return self._ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'

    @propertycache
    def _checklink(self):
        # True when the filesystem of the repo root supports symlinks
        return util.checklink(self._root)

    @propertycache
    def _checkexec(self):
        # True when the filesystem of the repo root supports the exec bit
        return bool(util.checkexec(self._root))

    @propertycache
    def _checkcase(self):
        # True when the filesystem is case-insensitive (checked on '.hg')
        return not util.fscasesensitive(self._join(b'.hg'))
235
235
    def _join(self, f):
        """Return the absolute path of repo-relative path `f`."""
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
240
240
    def flagfunc(self, buildfallback):
        """build a callable that returns flags associated with a filename

        The information is extracted from three possible layers:
        1. the file system if it supports the information
        2. the "fallback" information stored in the dirstate if any
        3. a more expensive mechanism inferring the flags from the parents.
        """

        # small hack to cache the result of buildfallback()
        fallback_func = []

        def get_flags(x):
            # Returns b'l', b'x', b'lx' subsets as a flag string, or b''.
            entry = None
            fallback_value = None
            try:
                st = os.lstat(self._join(x))
            except OSError:
                # file is gone: no flags
                return b''

            if self._checklink:
                # layer 1: the filesystem knows about symlinks
                if util.statislink(st):
                    return b'l'
            else:
                # layer 2: fallback data recorded in the dirstate entry
                entry = self.get_entry(x)
                if entry.has_fallback_symlink:
                    if entry.fallback_symlink:
                        return b'l'
                else:
                    # layer 3: infer from the parents (built lazily, once)
                    if not fallback_func:
                        fallback_func.append(buildfallback())
                    fallback_value = fallback_func[0](x)
                    if b'l' in fallback_value:
                        return b'l'

            if self._checkexec:
                # layer 1: the filesystem knows about the exec bit
                if util.statisexec(st):
                    return b'x'
            else:
                if entry is None:
                    entry = self.get_entry(x)
                # layer 2: fallback exec bit recorded in the dirstate entry
                if entry.has_fallback_exec:
                    if entry.fallback_exec:
                        return b'x'
                else:
                    # layer 3: reuse the fallback value computed above if any
                    if fallback_value is None:
                        if not fallback_func:
                            fallback_func.append(buildfallback())
                        fallback_value = fallback_func[0](x)
                    if b'x' in fallback_value:
                        return b'x'
            return b''

        return get_flags
295
295
296 @propertycache
296 @propertycache
297 def _cwd(self):
297 def _cwd(self):
298 # internal config: ui.forcecwd
298 # internal config: ui.forcecwd
299 forcecwd = self._ui.config(b'ui', b'forcecwd')
299 forcecwd = self._ui.config(b'ui', b'forcecwd')
300 if forcecwd:
300 if forcecwd:
301 return forcecwd
301 return forcecwd
302 return encoding.getcwd()
302 return encoding.getcwd()
303
303
304 def getcwd(self):
304 def getcwd(self):
305 """Return the path from which a canonical path is calculated.
305 """Return the path from which a canonical path is calculated.
306
306
307 This path should be used to resolve file patterns or to convert
307 This path should be used to resolve file patterns or to convert
308 canonical paths back to file paths for display. It shouldn't be
308 canonical paths back to file paths for display. It shouldn't be
309 used to get real file paths. Use vfs functions instead.
309 used to get real file paths. Use vfs functions instead.
310 """
310 """
311 cwd = self._cwd
311 cwd = self._cwd
312 if cwd == self._root:
312 if cwd == self._root:
313 return b''
313 return b''
314 # self._root ends with a path separator if self._root is '/' or 'C:\'
314 # self._root ends with a path separator if self._root is '/' or 'C:\'
315 rootsep = self._root
315 rootsep = self._root
316 if not util.endswithsep(rootsep):
316 if not util.endswithsep(rootsep):
317 rootsep += pycompat.ossep
317 rootsep += pycompat.ossep
318 if cwd.startswith(rootsep):
318 if cwd.startswith(rootsep):
319 return cwd[len(rootsep) :]
319 return cwd[len(rootsep) :]
320 else:
320 else:
321 # we're outside the repo. return an absolute path.
321 # we're outside the repo. return an absolute path.
322 return cwd
322 return cwd
323
323
324 def pathto(self, f, cwd=None):
324 def pathto(self, f, cwd=None):
325 if cwd is None:
325 if cwd is None:
326 cwd = self.getcwd()
326 cwd = self.getcwd()
327 path = util.pathto(self._root, cwd, f)
327 path = util.pathto(self._root, cwd, f)
328 if self._slash:
328 if self._slash:
329 return util.pconvert(path)
329 return util.pconvert(path)
330 return path
330 return path
331
331
332 def get_entry(self, path):
332 def get_entry(self, path):
333 """return a DirstateItem for the associated path"""
333 """return a DirstateItem for the associated path"""
334 entry = self._map.get(path)
334 entry = self._map.get(path)
335 if entry is None:
335 if entry is None:
336 return DirstateItem()
336 return DirstateItem()
337 return entry
337 return entry
338
338
    def __contains__(self, key):
        """True if `key` has an entry in the dirstate map."""
        return key in self._map

    def __iter__(self):
        """Iterate over dirstate file names in sorted order."""
        return iter(sorted(self._map))

    def items(self):
        """Iterate over (filename, DirstateItem) pairs (unsorted)."""
        return self._map.items()

    # historical alias kept for compatibility with older callers
    iteritems = items
349
349
    def parents(self):
        """Return both working-directory parents, validated."""
        return [self._validate(p) for p in self._pl]

    def p1(self):
        """Return the first working-directory parent."""
        return self._validate(self._pl[0])

    def p2(self):
        """Return the second working-directory parent (nullid if none)."""
        return self._validate(self._pl[1])

    @property
    def in_merge(self):
        """True if a merge is in progress"""
        # a real second parent implies an uncommitted merge
        return self._pl[1] != self._nodeconstants.nullid

    def branch(self):
        """Return the current branch name in local encoding."""
        return encoding.tolocal(self._branch)
366
366
    def setparents(self, p1, p2=None):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, "merged" entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if p2 is None:
            p2 = self._nodeconstants.nullid
        if self._parentwriters == 0:
            raise ValueError(
                b"cannot set dirstate parent outside of "
                b"dirstate.parentchange context manager"
            )

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents so invalidate/write can compare
            self._origpl = self._pl
        nullid = self._nodeconstants.nullid
        # True if we need to fold p2 related state back to a linear case
        fold_p2 = oldp2 != nullid and p2 == nullid
        return self._map.setparents(p1, p2, fold_p2=fold_p2)
392
392
    def setbranch(self, branch):
        """Record `branch` (local encoding) as the current branch on disk."""
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        f = self._opener(b'branch', b'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + b'\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache[b'_branch']
            if ce:
                ce.refresh()
        except:  # re-raises
            # discard the atomictemp file so a partial write never lands
            f.discard()
            raise
408
408
409 def invalidate(self):
409 def invalidate(self):
410 """Causes the next access to reread the dirstate.
410 """Causes the next access to reread the dirstate.
411
411
412 This is different from localrepo.invalidatedirstate() because it always
412 This is different from localrepo.invalidatedirstate() because it always
413 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
413 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
414 check whether the dirstate has changed before rereading it."""
414 check whether the dirstate has changed before rereading it."""
415
415
416 for a in ("_map", "_branch", "_ignore"):
416 for a in ("_map", "_branch", "_ignore"):
417 if a in self.__dict__:
417 if a in self.__dict__:
418 delattr(self, a)
418 delattr(self, a)
419 self._dirty = False
419 self._dirty = False
420 self._dirty_tracked_set = False
420 self._dirty_tracked_set = False
421 self._parentwriters = 0
421 self._parentwriters = 0
422 self._origpl = None
422 self._origpl = None
423
423
    def copy(self, source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""
        if source == dest:
            return
        self._dirty = True
        if source is not None:
            # refuse copy sources that fall outside the sparse checkout
            self._check_sparse(source)
            self._map.copymap[dest] = source
        else:
            self._map.copymap.pop(dest, None)
433
434
    def copied(self, file):
        """Return the copy source of `file`, or None if it is not a copy."""
        return self._map.copymap.get(file, None)

    def copies(self):
        """Return the full dest -> source copy mapping."""
        return self._map.copymap
439
440
    @requires_no_parents_change
    def set_tracked(self, filename, reset_copy=False):
        """a "public" method for generic code to mark a file as tracked

        This function is to be called outside of "update/merge" case. For
        example by a command like `hg add X`.

        if reset_copy is set, any existing copy information will be dropped.

        return True if the file was previously untracked, False otherwise.
        """
        self._dirty = True
        entry = self._map.get(filename)
        if entry is None or not entry.tracked:
            # only validate names that are actually becoming tracked
            self._check_new_tracked_filename(filename)
        pre_tracked = self._map.set_tracked(filename)
        if reset_copy:
            self._map.copymap.pop(filename, None)
        if pre_tracked:
            # the set of tracked files changed
            self._dirty_tracked_set = True
        return pre_tracked
461
462
462 @requires_no_parents_change
463 @requires_no_parents_change
463 def set_untracked(self, filename):
464 def set_untracked(self, filename):
464 """a "public" method for generic code to mark a file as untracked
465 """a "public" method for generic code to mark a file as untracked
465
466
466 This function is to be called outside of "update/merge" case. For
467 This function is to be called outside of "update/merge" case. For
467 example by a command like `hg remove X`.
468 example by a command like `hg remove X`.
468
469
469 return True the file was previously tracked, False otherwise.
470 return True the file was previously tracked, False otherwise.
470 """
471 """
471 ret = self._map.set_untracked(filename)
472 ret = self._map.set_untracked(filename)
472 if ret:
473 if ret:
473 self._dirty = True
474 self._dirty = True
474 self._dirty_tracked_set = True
475 self._dirty_tracked_set = True
475 return ret
476 return ret
476
477
    @requires_no_parents_change
    def set_clean(self, filename, parentfiledata):
        """record that the current state of the file on disk is known to be clean

        `parentfiledata` is a (mode, size, mtime) triple describing the
        on-disk state being recorded.
        """
        self._dirty = True
        if not self._map[filename].tracked:
            self._check_new_tracked_filename(filename)
        (mode, size, mtime) = parentfiledata
        self._map.set_clean(filename, mode, size, mtime)

    @requires_no_parents_change
    def set_possibly_dirty(self, filename):
        """record that the current state of the file on disk is unknown"""
        self._dirty = True
        self._map.set_possibly_dirty(filename)
491
492
492 @requires_parents_change
493 @requires_parents_change
493 def update_file_p1(
494 def update_file_p1(
494 self,
495 self,
495 filename,
496 filename,
496 p1_tracked,
497 p1_tracked,
497 ):
498 ):
498 """Set a file as tracked in the parent (or not)
499 """Set a file as tracked in the parent (or not)
499
500
500 This is to be called when adjust the dirstate to a new parent after an history
501 This is to be called when adjust the dirstate to a new parent after an history
501 rewriting operation.
502 rewriting operation.
502
503
503 It should not be called during a merge (p2 != nullid) and only within
504 It should not be called during a merge (p2 != nullid) and only within
504 a `with dirstate.parentchange():` context.
505 a `with dirstate.parentchange():` context.
505 """
506 """
506 if self.in_merge:
507 if self.in_merge:
507 msg = b'update_file_reference should not be called when merging'
508 msg = b'update_file_reference should not be called when merging'
508 raise error.ProgrammingError(msg)
509 raise error.ProgrammingError(msg)
509 entry = self._map.get(filename)
510 entry = self._map.get(filename)
510 if entry is None:
511 if entry is None:
511 wc_tracked = False
512 wc_tracked = False
512 else:
513 else:
513 wc_tracked = entry.tracked
514 wc_tracked = entry.tracked
514 if not (p1_tracked or wc_tracked):
515 if not (p1_tracked or wc_tracked):
515 # the file is no longer relevant to anyone
516 # the file is no longer relevant to anyone
516 if self._map.get(filename) is not None:
517 if self._map.get(filename) is not None:
517 self._map.reset_state(filename)
518 self._map.reset_state(filename)
518 self._dirty = True
519 self._dirty = True
519 elif (not p1_tracked) and wc_tracked:
520 elif (not p1_tracked) and wc_tracked:
520 if entry is not None and entry.added:
521 if entry is not None and entry.added:
521 return # avoid dropping copy information (maybe?)
522 return # avoid dropping copy information (maybe?)
522
523
523 self._map.reset_state(
524 self._map.reset_state(
524 filename,
525 filename,
525 wc_tracked,
526 wc_tracked,
526 p1_tracked,
527 p1_tracked,
527 # the underlying reference might have changed, we will have to
528 # the underlying reference might have changed, we will have to
528 # check it.
529 # check it.
529 has_meaningful_mtime=False,
530 has_meaningful_mtime=False,
530 )
531 )
531
532
    @requires_parents_change
    def update_file(
        self,
        filename,
        wc_tracked,
        p1_tracked,
        p2_info=False,
        possibly_dirty=False,
        parentfiledata=None,
    ):
        """update the information about a file in the dirstate

        This is to be called when the dirstate's parents change, to keep track
        of what is the file situation in regards to the working copy and its parent.

        This function must be called within a `dirstate.parentchange` context.

        note: the API is at an early stage and we might need to adjust it
        depending of what information ends up being relevant and useful to
        other processing.
        """

        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees

        self._dirty = True
        old_entry = self._map.get(filename)
        if old_entry is None:
            prev_tracked = False
        else:
            prev_tracked = old_entry.tracked
        if prev_tracked != wc_tracked:
            # the set of tracked files changed
            self._dirty_tracked_set = True

        self._map.reset_state(
            filename,
            wc_tracked,
            p1_tracked,
            p2_info=p2_info,
            has_meaningful_mtime=not possibly_dirty,
            parentfiledata=parentfiledata,
        )
575
576
    def _check_new_tracked_filename(self, filename):
        """Abort if `filename` cannot become tracked: invalid name, clash
        with a tracked directory or file, or outside the sparse checkout.
        """
        scmutil.checkfilename(filename)
        if self._map.hastrackeddir(filename):
            msg = _(b'directory %r already in dirstate')
            msg %= pycompat.bytestr(filename)
            raise error.Abort(msg)
        # shadows
        for d in pathutil.finddirs(filename):
            if self._map.hastrackeddir(d):
                break
            entry = self._map.get(d)
            if entry is not None and not entry.removed:
                msg = _(b'file %r in dirstate clashes with %r')
                msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
                raise error.Abort(msg)
        self._check_sparse(filename)
593
    def _check_sparse(self, filename):
        """Check that a filename is inside the sparse profile

        Aborts when sparse is active and `filename` falls outside it;
        a no-op when sparse is disabled or matches everything.
        """
        sparsematch = self._sparsematcher
        if sparsematch is not None and not sparsematch.always():
            if not sparsematch(filename):
                msg = _(b"cannot add '%s' - it is outside the sparse checkout")
                hint = _(
                    b'include file with `hg debugsparse --include <pattern>` or use '
                    b'`hg add -s <file>` to include file directory while adding'
                )
                raise error.Abort(msg % filename, hint=hint)
591
605
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Resolve the on-disk case of `path` and cache it in `storemap`.

        `normed` is the case-normalized form of `path`; `exists` may be
        passed to skip the lexists() check. Results for existing paths are
        memoized in `storemap` (a fold map keyed by normalized name).
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and b'/' in path:
                d, f = path.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + b"/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if b'/' in normed:
                d, f = normed.rsplit(b'/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + b"/" + d
                folded = d + b"/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # only cache results for paths that actually exist on disk
            storemap[normed] = folded

        return folded
617
631
618 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
632 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
619 normed = util.normcase(path)
633 normed = util.normcase(path)
620 folded = self._map.filefoldmap.get(normed, None)
634 folded = self._map.filefoldmap.get(normed, None)
621 if folded is None:
635 if folded is None:
622 if isknown:
636 if isknown:
623 folded = path
637 folded = path
624 else:
638 else:
625 folded = self._discoverpath(
639 folded = self._discoverpath(
626 path, normed, ignoremissing, exists, self._map.filefoldmap
640 path, normed, ignoremissing, exists, self._map.filefoldmap
627 )
641 )
628 return folded
642 return folded
629
643
    def _normalize(self, path, isknown, ignoremissing=False, exists=None):
        """Case-normalize `path` against both tracked files and directories."""
        normed = util.normcase(path)
        folded = self._map.filefoldmap.get(normed, None)
        if folded is None:
            folded = self._map.dirfoldmap.get(normed, None)
        if folded is None:
            if isknown:
                folded = path
            else:
                # store discovered result in dirfoldmap so that future
                # normalizefile calls don't start matching directories
                folded = self._discoverpath(
                    path, normed, ignoremissing, exists, self._map.dirfoldmap
                )
        return folded
645
659
646 def normalize(self, path, isknown=False, ignoremissing=False):
660 def normalize(self, path, isknown=False, ignoremissing=False):
647 """
661 """
648 normalize the case of a pathname when on a casefolding filesystem
662 normalize the case of a pathname when on a casefolding filesystem
649
663
650 isknown specifies whether the filename came from walking the
664 isknown specifies whether the filename came from walking the
651 disk, to avoid extra filesystem access.
665 disk, to avoid extra filesystem access.
652
666
653 If ignoremissing is True, missing path are returned
667 If ignoremissing is True, missing path are returned
654 unchanged. Otherwise, we try harder to normalize possibly
668 unchanged. Otherwise, we try harder to normalize possibly
655 existing path components.
669 existing path components.
656
670
657 The normalized case is determined based on the following precedence:
671 The normalized case is determined based on the following precedence:
658
672
659 - version of name already stored in the dirstate
673 - version of name already stored in the dirstate
660 - version of name stored on disk
674 - version of name stored on disk
661 - version provided via command arguments
675 - version provided via command arguments
662 """
676 """
663
677
664 if self._checkcase:
678 if self._checkcase:
665 return self._normalize(path, isknown, ignoremissing)
679 return self._normalize(path, isknown, ignoremissing)
666 return path
680 return path
667
681
    def clear(self):
        """Empty the dirstate map and mark the dirstate dirty."""
        self._map.clear()
        self._dirty = True
671
685
672 def rebuild(self, parent, allfiles, changedfiles=None):
686 def rebuild(self, parent, allfiles, changedfiles=None):
673
687
674 matcher = self._sparsematcher
688 matcher = self._sparsematcher
675 if matcher is not None and not matcher.always():
689 if matcher is not None and not matcher.always():
676 # should not add non-matching files
690 # should not add non-matching files
677 allfiles = [f for f in allfiles if matcher(f)]
691 allfiles = [f for f in allfiles if matcher(f)]
678 if changedfiles:
692 if changedfiles:
679 changedfiles = [f for f in changedfiles if matcher(f)]
693 changedfiles = [f for f in changedfiles if matcher(f)]
680
694
681 if changedfiles is not None:
695 if changedfiles is not None:
682 # these files will be deleted from the dirstate when they are
696 # these files will be deleted from the dirstate when they are
683 # not found to be in allfiles
697 # not found to be in allfiles
684 dirstatefilestoremove = {f for f in self if not matcher(f)}
698 dirstatefilestoremove = {f for f in self if not matcher(f)}
685 changedfiles = dirstatefilestoremove.union(changedfiles)
699 changedfiles = dirstatefilestoremove.union(changedfiles)
686
700
687 if changedfiles is None:
701 if changedfiles is None:
688 # Rebuild entire dirstate
702 # Rebuild entire dirstate
689 to_lookup = allfiles
703 to_lookup = allfiles
690 to_drop = []
704 to_drop = []
691 self.clear()
705 self.clear()
692 elif len(changedfiles) < 10:
706 elif len(changedfiles) < 10:
693 # Avoid turning allfiles into a set, which can be expensive if it's
707 # Avoid turning allfiles into a set, which can be expensive if it's
694 # large.
708 # large.
695 to_lookup = []
709 to_lookup = []
696 to_drop = []
710 to_drop = []
697 for f in changedfiles:
711 for f in changedfiles:
698 if f in allfiles:
712 if f in allfiles:
699 to_lookup.append(f)
713 to_lookup.append(f)
700 else:
714 else:
701 to_drop.append(f)
715 to_drop.append(f)
702 else:
716 else:
703 changedfilesset = set(changedfiles)
717 changedfilesset = set(changedfiles)
704 to_lookup = changedfilesset & set(allfiles)
718 to_lookup = changedfilesset & set(allfiles)
705 to_drop = changedfilesset - to_lookup
719 to_drop = changedfilesset - to_lookup
706
720
707 if self._origpl is None:
721 if self._origpl is None:
708 self._origpl = self._pl
722 self._origpl = self._pl
709 self._map.setparents(parent, self._nodeconstants.nullid)
723 self._map.setparents(parent, self._nodeconstants.nullid)
710
724
711 for f in to_lookup:
725 for f in to_lookup:
712
726
713 if self.in_merge:
727 if self.in_merge:
714 self.set_tracked(f)
728 self.set_tracked(f)
715 else:
729 else:
716 self._map.reset_state(
730 self._map.reset_state(
717 f,
731 f,
718 wc_tracked=True,
732 wc_tracked=True,
719 p1_tracked=True,
733 p1_tracked=True,
720 )
734 )
721 for f in to_drop:
735 for f in to_drop:
722 self._map.reset_state(f)
736 self._map.reset_state(f)
723
737
724 self._dirty = True
738 self._dirty = True
725
739
726 def identity(self):
740 def identity(self):
727 """Return identity of dirstate itself to detect changing in storage
741 """Return identity of dirstate itself to detect changing in storage
728
742
729 If identity of previous dirstate is equal to this, writing
743 If identity of previous dirstate is equal to this, writing
730 changes based on the former dirstate out can keep consistency.
744 changes based on the former dirstate out can keep consistency.
731 """
745 """
732 return self._map.identity
746 return self._map.identity
733
747
734 def write(self, tr):
748 def write(self, tr):
735 if not self._dirty:
749 if not self._dirty:
736 return
750 return
737
751
738 write_key = self._use_tracked_hint and self._dirty_tracked_set
752 write_key = self._use_tracked_hint and self._dirty_tracked_set
739 if tr:
753 if tr:
740 # delay writing in-memory changes out
754 # delay writing in-memory changes out
741 tr.addfilegenerator(
755 tr.addfilegenerator(
742 b'dirstate-1-main',
756 b'dirstate-1-main',
743 (self._filename,),
757 (self._filename,),
744 lambda f: self._writedirstate(tr, f),
758 lambda f: self._writedirstate(tr, f),
745 location=b'plain',
759 location=b'plain',
746 post_finalize=True,
760 post_finalize=True,
747 )
761 )
748 if write_key:
762 if write_key:
749 tr.addfilegenerator(
763 tr.addfilegenerator(
750 b'dirstate-2-key-post',
764 b'dirstate-2-key-post',
751 (self._filename_th,),
765 (self._filename_th,),
752 lambda f: self._write_tracked_hint(tr, f),
766 lambda f: self._write_tracked_hint(tr, f),
753 location=b'plain',
767 location=b'plain',
754 post_finalize=True,
768 post_finalize=True,
755 )
769 )
756 return
770 return
757
771
758 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
772 file = lambda f: self._opener(f, b"w", atomictemp=True, checkambig=True)
759 with file(self._filename) as f:
773 with file(self._filename) as f:
760 self._writedirstate(tr, f)
774 self._writedirstate(tr, f)
761 if write_key:
775 if write_key:
762 # we update the key-file after writing to make sure reader have a
776 # we update the key-file after writing to make sure reader have a
763 # key that match the newly written content
777 # key that match the newly written content
764 with file(self._filename_th) as f:
778 with file(self._filename_th) as f:
765 self._write_tracked_hint(tr, f)
779 self._write_tracked_hint(tr, f)
766
780
767 def delete_tracked_hint(self):
781 def delete_tracked_hint(self):
768 """remove the tracked_hint file
782 """remove the tracked_hint file
769
783
770 To be used by format downgrades operation"""
784 To be used by format downgrades operation"""
771 self._opener.unlink(self._filename_th)
785 self._opener.unlink(self._filename_th)
772 self._use_tracked_hint = False
786 self._use_tracked_hint = False
773
787
774 def addparentchangecallback(self, category, callback):
788 def addparentchangecallback(self, category, callback):
775 """add a callback to be called when the wd parents are changed
789 """add a callback to be called when the wd parents are changed
776
790
777 Callback will be called with the following arguments:
791 Callback will be called with the following arguments:
778 dirstate, (oldp1, oldp2), (newp1, newp2)
792 dirstate, (oldp1, oldp2), (newp1, newp2)
779
793
780 Category is a unique identifier to allow overwriting an old callback
794 Category is a unique identifier to allow overwriting an old callback
781 with a newer callback.
795 with a newer callback.
782 """
796 """
783 self._plchangecallbacks[category] = callback
797 self._plchangecallbacks[category] = callback
784
798
785 def _writedirstate(self, tr, st):
799 def _writedirstate(self, tr, st):
786 # notify callbacks about parents change
800 # notify callbacks about parents change
787 if self._origpl is not None and self._origpl != self._pl:
801 if self._origpl is not None and self._origpl != self._pl:
788 for c, callback in sorted(self._plchangecallbacks.items()):
802 for c, callback in sorted(self._plchangecallbacks.items()):
789 callback(self, self._origpl, self._pl)
803 callback(self, self._origpl, self._pl)
790 self._origpl = None
804 self._origpl = None
791 self._map.write(tr, st)
805 self._map.write(tr, st)
792 self._dirty = False
806 self._dirty = False
793 self._dirty_tracked_set = False
807 self._dirty_tracked_set = False
794
808
795 def _write_tracked_hint(self, tr, f):
809 def _write_tracked_hint(self, tr, f):
796 key = node.hex(uuid.uuid4().bytes)
810 key = node.hex(uuid.uuid4().bytes)
797 f.write(b"1\n%s\n" % key) # 1 is the format version
811 f.write(b"1\n%s\n" % key) # 1 is the format version
798
812
799 def _dirignore(self, f):
813 def _dirignore(self, f):
800 if self._ignore(f):
814 if self._ignore(f):
801 return True
815 return True
802 for p in pathutil.finddirs(f):
816 for p in pathutil.finddirs(f):
803 if self._ignore(p):
817 if self._ignore(p):
804 return True
818 return True
805 return False
819 return False
806
820
807 def _ignorefiles(self):
821 def _ignorefiles(self):
808 files = []
822 files = []
809 if os.path.exists(self._join(b'.hgignore')):
823 if os.path.exists(self._join(b'.hgignore')):
810 files.append(self._join(b'.hgignore'))
824 files.append(self._join(b'.hgignore'))
811 for name, path in self._ui.configitems(b"ui"):
825 for name, path in self._ui.configitems(b"ui"):
812 if name == b'ignore' or name.startswith(b'ignore.'):
826 if name == b'ignore' or name.startswith(b'ignore.'):
813 # we need to use os.path.join here rather than self._join
827 # we need to use os.path.join here rather than self._join
814 # because path is arbitrary and user-specified
828 # because path is arbitrary and user-specified
815 files.append(os.path.join(self._rootdir, util.expandpath(path)))
829 files.append(os.path.join(self._rootdir, util.expandpath(path)))
816 return files
830 return files
817
831
818 def _ignorefileandline(self, f):
832 def _ignorefileandline(self, f):
819 files = collections.deque(self._ignorefiles())
833 files = collections.deque(self._ignorefiles())
820 visited = set()
834 visited = set()
821 while files:
835 while files:
822 i = files.popleft()
836 i = files.popleft()
823 patterns = matchmod.readpatternfile(
837 patterns = matchmod.readpatternfile(
824 i, self._ui.warn, sourceinfo=True
838 i, self._ui.warn, sourceinfo=True
825 )
839 )
826 for pattern, lineno, line in patterns:
840 for pattern, lineno, line in patterns:
827 kind, p = matchmod._patsplit(pattern, b'glob')
841 kind, p = matchmod._patsplit(pattern, b'glob')
828 if kind == b"subinclude":
842 if kind == b"subinclude":
829 if p not in visited:
843 if p not in visited:
830 files.append(p)
844 files.append(p)
831 continue
845 continue
832 m = matchmod.match(
846 m = matchmod.match(
833 self._root, b'', [], [pattern], warn=self._ui.warn
847 self._root, b'', [], [pattern], warn=self._ui.warn
834 )
848 )
835 if m(f):
849 if m(f):
836 return (i, lineno, line)
850 return (i, lineno, line)
837 visited.add(i)
851 visited.add(i)
838 return (None, -1, b"")
852 return (None, -1, b"")
839
853
840 def _walkexplicit(self, match, subrepos):
854 def _walkexplicit(self, match, subrepos):
841 """Get stat data about the files explicitly specified by match.
855 """Get stat data about the files explicitly specified by match.
842
856
843 Return a triple (results, dirsfound, dirsnotfound).
857 Return a triple (results, dirsfound, dirsnotfound).
844 - results is a mapping from filename to stat result. It also contains
858 - results is a mapping from filename to stat result. It also contains
845 listings mapping subrepos and .hg to None.
859 listings mapping subrepos and .hg to None.
846 - dirsfound is a list of files found to be directories.
860 - dirsfound is a list of files found to be directories.
847 - dirsnotfound is a list of files that the dirstate thinks are
861 - dirsnotfound is a list of files that the dirstate thinks are
848 directories and that were not found."""
862 directories and that were not found."""
849
863
850 def badtype(mode):
864 def badtype(mode):
851 kind = _(b'unknown')
865 kind = _(b'unknown')
852 if stat.S_ISCHR(mode):
866 if stat.S_ISCHR(mode):
853 kind = _(b'character device')
867 kind = _(b'character device')
854 elif stat.S_ISBLK(mode):
868 elif stat.S_ISBLK(mode):
855 kind = _(b'block device')
869 kind = _(b'block device')
856 elif stat.S_ISFIFO(mode):
870 elif stat.S_ISFIFO(mode):
857 kind = _(b'fifo')
871 kind = _(b'fifo')
858 elif stat.S_ISSOCK(mode):
872 elif stat.S_ISSOCK(mode):
859 kind = _(b'socket')
873 kind = _(b'socket')
860 elif stat.S_ISDIR(mode):
874 elif stat.S_ISDIR(mode):
861 kind = _(b'directory')
875 kind = _(b'directory')
862 return _(b'unsupported file type (type is %s)') % kind
876 return _(b'unsupported file type (type is %s)') % kind
863
877
864 badfn = match.bad
878 badfn = match.bad
865 dmap = self._map
879 dmap = self._map
866 lstat = os.lstat
880 lstat = os.lstat
867 getkind = stat.S_IFMT
881 getkind = stat.S_IFMT
868 dirkind = stat.S_IFDIR
882 dirkind = stat.S_IFDIR
869 regkind = stat.S_IFREG
883 regkind = stat.S_IFREG
870 lnkkind = stat.S_IFLNK
884 lnkkind = stat.S_IFLNK
871 join = self._join
885 join = self._join
872 dirsfound = []
886 dirsfound = []
873 foundadd = dirsfound.append
887 foundadd = dirsfound.append
874 dirsnotfound = []
888 dirsnotfound = []
875 notfoundadd = dirsnotfound.append
889 notfoundadd = dirsnotfound.append
876
890
877 if not match.isexact() and self._checkcase:
891 if not match.isexact() and self._checkcase:
878 normalize = self._normalize
892 normalize = self._normalize
879 else:
893 else:
880 normalize = None
894 normalize = None
881
895
882 files = sorted(match.files())
896 files = sorted(match.files())
883 subrepos.sort()
897 subrepos.sort()
884 i, j = 0, 0
898 i, j = 0, 0
885 while i < len(files) and j < len(subrepos):
899 while i < len(files) and j < len(subrepos):
886 subpath = subrepos[j] + b"/"
900 subpath = subrepos[j] + b"/"
887 if files[i] < subpath:
901 if files[i] < subpath:
888 i += 1
902 i += 1
889 continue
903 continue
890 while i < len(files) and files[i].startswith(subpath):
904 while i < len(files) and files[i].startswith(subpath):
891 del files[i]
905 del files[i]
892 j += 1
906 j += 1
893
907
894 if not files or b'' in files:
908 if not files or b'' in files:
895 files = [b'']
909 files = [b'']
896 # constructing the foldmap is expensive, so don't do it for the
910 # constructing the foldmap is expensive, so don't do it for the
897 # common case where files is ['']
911 # common case where files is ['']
898 normalize = None
912 normalize = None
899 results = dict.fromkeys(subrepos)
913 results = dict.fromkeys(subrepos)
900 results[b'.hg'] = None
914 results[b'.hg'] = None
901
915
902 for ff in files:
916 for ff in files:
903 if normalize:
917 if normalize:
904 nf = normalize(ff, False, True)
918 nf = normalize(ff, False, True)
905 else:
919 else:
906 nf = ff
920 nf = ff
907 if nf in results:
921 if nf in results:
908 continue
922 continue
909
923
910 try:
924 try:
911 st = lstat(join(nf))
925 st = lstat(join(nf))
912 kind = getkind(st.st_mode)
926 kind = getkind(st.st_mode)
913 if kind == dirkind:
927 if kind == dirkind:
914 if nf in dmap:
928 if nf in dmap:
915 # file replaced by dir on disk but still in dirstate
929 # file replaced by dir on disk but still in dirstate
916 results[nf] = None
930 results[nf] = None
917 foundadd((nf, ff))
931 foundadd((nf, ff))
918 elif kind == regkind or kind == lnkkind:
932 elif kind == regkind or kind == lnkkind:
919 results[nf] = st
933 results[nf] = st
920 else:
934 else:
921 badfn(ff, badtype(kind))
935 badfn(ff, badtype(kind))
922 if nf in dmap:
936 if nf in dmap:
923 results[nf] = None
937 results[nf] = None
924 except OSError as inst: # nf not found on disk - it is dirstate only
938 except OSError as inst: # nf not found on disk - it is dirstate only
925 if nf in dmap: # does it exactly match a missing file?
939 if nf in dmap: # does it exactly match a missing file?
926 results[nf] = None
940 results[nf] = None
927 else: # does it match a missing directory?
941 else: # does it match a missing directory?
928 if self._map.hasdir(nf):
942 if self._map.hasdir(nf):
929 notfoundadd(nf)
943 notfoundadd(nf)
930 else:
944 else:
931 badfn(ff, encoding.strtolocal(inst.strerror))
945 badfn(ff, encoding.strtolocal(inst.strerror))
932
946
933 # match.files() may contain explicitly-specified paths that shouldn't
947 # match.files() may contain explicitly-specified paths that shouldn't
934 # be taken; drop them from the list of files found. dirsfound/notfound
948 # be taken; drop them from the list of files found. dirsfound/notfound
935 # aren't filtered here because they will be tested later.
949 # aren't filtered here because they will be tested later.
936 if match.anypats():
950 if match.anypats():
937 for f in list(results):
951 for f in list(results):
938 if f == b'.hg' or f in subrepos:
952 if f == b'.hg' or f in subrepos:
939 # keep sentinel to disable further out-of-repo walks
953 # keep sentinel to disable further out-of-repo walks
940 continue
954 continue
941 if not match(f):
955 if not match(f):
942 del results[f]
956 del results[f]
943
957
944 # Case insensitive filesystems cannot rely on lstat() failing to detect
958 # Case insensitive filesystems cannot rely on lstat() failing to detect
945 # a case-only rename. Prune the stat object for any file that does not
959 # a case-only rename. Prune the stat object for any file that does not
946 # match the case in the filesystem, if there are multiple files that
960 # match the case in the filesystem, if there are multiple files that
947 # normalize to the same path.
961 # normalize to the same path.
948 if match.isexact() and self._checkcase:
962 if match.isexact() and self._checkcase:
949 normed = {}
963 normed = {}
950
964
951 for f, st in results.items():
965 for f, st in results.items():
952 if st is None:
966 if st is None:
953 continue
967 continue
954
968
955 nc = util.normcase(f)
969 nc = util.normcase(f)
956 paths = normed.get(nc)
970 paths = normed.get(nc)
957
971
958 if paths is None:
972 if paths is None:
959 paths = set()
973 paths = set()
960 normed[nc] = paths
974 normed[nc] = paths
961
975
962 paths.add(f)
976 paths.add(f)
963
977
964 for norm, paths in normed.items():
978 for norm, paths in normed.items():
965 if len(paths) > 1:
979 if len(paths) > 1:
966 for path in paths:
980 for path in paths:
967 folded = self._discoverpath(
981 folded = self._discoverpath(
968 path, norm, True, None, self._map.dirfoldmap
982 path, norm, True, None, self._map.dirfoldmap
969 )
983 )
970 if path != folded:
984 if path != folded:
971 results[path] = None
985 results[path] = None
972
986
973 return results, dirsfound, dirsnotfound
987 return results, dirsfound, dirsnotfound
974
988
975 def walk(self, match, subrepos, unknown, ignored, full=True):
989 def walk(self, match, subrepos, unknown, ignored, full=True):
976 """
990 """
977 Walk recursively through the directory tree, finding all files
991 Walk recursively through the directory tree, finding all files
978 matched by match.
992 matched by match.
979
993
980 If full is False, maybe skip some known-clean files.
994 If full is False, maybe skip some known-clean files.
981
995
982 Return a dict mapping filename to stat-like object (either
996 Return a dict mapping filename to stat-like object (either
983 mercurial.osutil.stat instance or return value of os.stat()).
997 mercurial.osutil.stat instance or return value of os.stat()).
984
998
985 """
999 """
986 # full is a flag that extensions that hook into walk can use -- this
1000 # full is a flag that extensions that hook into walk can use -- this
987 # implementation doesn't use it at all. This satisfies the contract
1001 # implementation doesn't use it at all. This satisfies the contract
988 # because we only guarantee a "maybe".
1002 # because we only guarantee a "maybe".
989
1003
990 if ignored:
1004 if ignored:
991 ignore = util.never
1005 ignore = util.never
992 dirignore = util.never
1006 dirignore = util.never
993 elif unknown:
1007 elif unknown:
994 ignore = self._ignore
1008 ignore = self._ignore
995 dirignore = self._dirignore
1009 dirignore = self._dirignore
996 else:
1010 else:
997 # if not unknown and not ignored, drop dir recursion and step 2
1011 # if not unknown and not ignored, drop dir recursion and step 2
998 ignore = util.always
1012 ignore = util.always
999 dirignore = util.always
1013 dirignore = util.always
1000
1014
1001 if self._sparsematchfn is not None:
1015 if self._sparsematchfn is not None:
1002 em = matchmod.exact(match.files())
1016 em = matchmod.exact(match.files())
1003 sm = matchmod.unionmatcher([self._sparsematcher, em])
1017 sm = matchmod.unionmatcher([self._sparsematcher, em])
1004 match = matchmod.intersectmatchers(match, sm)
1018 match = matchmod.intersectmatchers(match, sm)
1005
1019
1006 matchfn = match.matchfn
1020 matchfn = match.matchfn
1007 matchalways = match.always()
1021 matchalways = match.always()
1008 matchtdir = match.traversedir
1022 matchtdir = match.traversedir
1009 dmap = self._map
1023 dmap = self._map
1010 listdir = util.listdir
1024 listdir = util.listdir
1011 lstat = os.lstat
1025 lstat = os.lstat
1012 dirkind = stat.S_IFDIR
1026 dirkind = stat.S_IFDIR
1013 regkind = stat.S_IFREG
1027 regkind = stat.S_IFREG
1014 lnkkind = stat.S_IFLNK
1028 lnkkind = stat.S_IFLNK
1015 join = self._join
1029 join = self._join
1016
1030
1017 exact = skipstep3 = False
1031 exact = skipstep3 = False
1018 if match.isexact(): # match.exact
1032 if match.isexact(): # match.exact
1019 exact = True
1033 exact = True
1020 dirignore = util.always # skip step 2
1034 dirignore = util.always # skip step 2
1021 elif match.prefix(): # match.match, no patterns
1035 elif match.prefix(): # match.match, no patterns
1022 skipstep3 = True
1036 skipstep3 = True
1023
1037
1024 if not exact and self._checkcase:
1038 if not exact and self._checkcase:
1025 normalize = self._normalize
1039 normalize = self._normalize
1026 normalizefile = self._normalizefile
1040 normalizefile = self._normalizefile
1027 skipstep3 = False
1041 skipstep3 = False
1028 else:
1042 else:
1029 normalize = self._normalize
1043 normalize = self._normalize
1030 normalizefile = None
1044 normalizefile = None
1031
1045
1032 # step 1: find all explicit files
1046 # step 1: find all explicit files
1033 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1047 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1034 if matchtdir:
1048 if matchtdir:
1035 for d in work:
1049 for d in work:
1036 matchtdir(d[0])
1050 matchtdir(d[0])
1037 for d in dirsnotfound:
1051 for d in dirsnotfound:
1038 matchtdir(d)
1052 matchtdir(d)
1039
1053
1040 skipstep3 = skipstep3 and not (work or dirsnotfound)
1054 skipstep3 = skipstep3 and not (work or dirsnotfound)
1041 work = [d for d in work if not dirignore(d[0])]
1055 work = [d for d in work if not dirignore(d[0])]
1042
1056
1043 # step 2: visit subdirectories
1057 # step 2: visit subdirectories
1044 def traverse(work, alreadynormed):
1058 def traverse(work, alreadynormed):
1045 wadd = work.append
1059 wadd = work.append
1046 while work:
1060 while work:
1047 tracing.counter('dirstate.walk work', len(work))
1061 tracing.counter('dirstate.walk work', len(work))
1048 nd = work.pop()
1062 nd = work.pop()
1049 visitentries = match.visitchildrenset(nd)
1063 visitentries = match.visitchildrenset(nd)
1050 if not visitentries:
1064 if not visitentries:
1051 continue
1065 continue
1052 if visitentries == b'this' or visitentries == b'all':
1066 if visitentries == b'this' or visitentries == b'all':
1053 visitentries = None
1067 visitentries = None
1054 skip = None
1068 skip = None
1055 if nd != b'':
1069 if nd != b'':
1056 skip = b'.hg'
1070 skip = b'.hg'
1057 try:
1071 try:
1058 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1072 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1059 entries = listdir(join(nd), stat=True, skip=skip)
1073 entries = listdir(join(nd), stat=True, skip=skip)
1060 except (PermissionError, FileNotFoundError) as inst:
1074 except (PermissionError, FileNotFoundError) as inst:
1061 match.bad(
1075 match.bad(
1062 self.pathto(nd), encoding.strtolocal(inst.strerror)
1076 self.pathto(nd), encoding.strtolocal(inst.strerror)
1063 )
1077 )
1064 continue
1078 continue
1065 for f, kind, st in entries:
1079 for f, kind, st in entries:
1066 # Some matchers may return files in the visitentries set,
1080 # Some matchers may return files in the visitentries set,
1067 # instead of 'this', if the matcher explicitly mentions them
1081 # instead of 'this', if the matcher explicitly mentions them
1068 # and is not an exactmatcher. This is acceptable; we do not
1082 # and is not an exactmatcher. This is acceptable; we do not
1069 # make any hard assumptions about file-or-directory below
1083 # make any hard assumptions about file-or-directory below
1070 # based on the presence of `f` in visitentries. If
1084 # based on the presence of `f` in visitentries. If
1071 # visitchildrenset returned a set, we can always skip the
1085 # visitchildrenset returned a set, we can always skip the
1072 # entries *not* in the set it provided regardless of whether
1086 # entries *not* in the set it provided regardless of whether
1073 # they're actually a file or a directory.
1087 # they're actually a file or a directory.
1074 if visitentries and f not in visitentries:
1088 if visitentries and f not in visitentries:
1075 continue
1089 continue
1076 if normalizefile:
1090 if normalizefile:
1077 # even though f might be a directory, we're only
1091 # even though f might be a directory, we're only
1078 # interested in comparing it to files currently in the
1092 # interested in comparing it to files currently in the
1079 # dmap -- therefore normalizefile is enough
1093 # dmap -- therefore normalizefile is enough
1080 nf = normalizefile(
1094 nf = normalizefile(
1081 nd and (nd + b"/" + f) or f, True, True
1095 nd and (nd + b"/" + f) or f, True, True
1082 )
1096 )
1083 else:
1097 else:
1084 nf = nd and (nd + b"/" + f) or f
1098 nf = nd and (nd + b"/" + f) or f
1085 if nf not in results:
1099 if nf not in results:
1086 if kind == dirkind:
1100 if kind == dirkind:
1087 if not ignore(nf):
1101 if not ignore(nf):
1088 if matchtdir:
1102 if matchtdir:
1089 matchtdir(nf)
1103 matchtdir(nf)
1090 wadd(nf)
1104 wadd(nf)
1091 if nf in dmap and (matchalways or matchfn(nf)):
1105 if nf in dmap and (matchalways or matchfn(nf)):
1092 results[nf] = None
1106 results[nf] = None
1093 elif kind == regkind or kind == lnkkind:
1107 elif kind == regkind or kind == lnkkind:
1094 if nf in dmap:
1108 if nf in dmap:
1095 if matchalways or matchfn(nf):
1109 if matchalways or matchfn(nf):
1096 results[nf] = st
1110 results[nf] = st
1097 elif (matchalways or matchfn(nf)) and not ignore(
1111 elif (matchalways or matchfn(nf)) and not ignore(
1098 nf
1112 nf
1099 ):
1113 ):
1100 # unknown file -- normalize if necessary
1114 # unknown file -- normalize if necessary
1101 if not alreadynormed:
1115 if not alreadynormed:
1102 nf = normalize(nf, False, True)
1116 nf = normalize(nf, False, True)
1103 results[nf] = st
1117 results[nf] = st
1104 elif nf in dmap and (matchalways or matchfn(nf)):
1118 elif nf in dmap and (matchalways or matchfn(nf)):
1105 results[nf] = None
1119 results[nf] = None
1106
1120
1107 for nd, d in work:
1121 for nd, d in work:
1108 # alreadynormed means that processwork doesn't have to do any
1122 # alreadynormed means that processwork doesn't have to do any
1109 # expensive directory normalization
1123 # expensive directory normalization
1110 alreadynormed = not normalize or nd == d
1124 alreadynormed = not normalize or nd == d
1111 traverse([d], alreadynormed)
1125 traverse([d], alreadynormed)
1112
1126
1113 for s in subrepos:
1127 for s in subrepos:
1114 del results[s]
1128 del results[s]
1115 del results[b'.hg']
1129 del results[b'.hg']
1116
1130
1117 # step 3: visit remaining files from dmap
1131 # step 3: visit remaining files from dmap
1118 if not skipstep3 and not exact:
1132 if not skipstep3 and not exact:
1119 # If a dmap file is not in results yet, it was either
1133 # If a dmap file is not in results yet, it was either
1120 # a) not matching matchfn b) ignored, c) missing, or d) under a
1134 # a) not matching matchfn b) ignored, c) missing, or d) under a
1121 # symlink directory.
1135 # symlink directory.
1122 if not results and matchalways:
1136 if not results and matchalways:
1123 visit = [f for f in dmap]
1137 visit = [f for f in dmap]
1124 else:
1138 else:
1125 visit = [f for f in dmap if f not in results and matchfn(f)]
1139 visit = [f for f in dmap if f not in results and matchfn(f)]
1126 visit.sort()
1140 visit.sort()
1127
1141
1128 if unknown:
1142 if unknown:
1129 # unknown == True means we walked all dirs under the roots
1143 # unknown == True means we walked all dirs under the roots
1130 # that wasn't ignored, and everything that matched was stat'ed
1144 # that wasn't ignored, and everything that matched was stat'ed
1131 # and is already in results.
1145 # and is already in results.
1132 # The rest must thus be ignored or under a symlink.
1146 # The rest must thus be ignored or under a symlink.
1133 audit_path = pathutil.pathauditor(self._root, cached=True)
1147 audit_path = pathutil.pathauditor(self._root, cached=True)
1134
1148
1135 for nf in iter(visit):
1149 for nf in iter(visit):
1136 # If a stat for the same file was already added with a
1150 # If a stat for the same file was already added with a
1137 # different case, don't add one for this, since that would
1151 # different case, don't add one for this, since that would
1138 # make it appear as if the file exists under both names
1152 # make it appear as if the file exists under both names
1139 # on disk.
1153 # on disk.
1140 if (
1154 if (
1141 normalizefile
1155 normalizefile
1142 and normalizefile(nf, True, True) in results
1156 and normalizefile(nf, True, True) in results
1143 ):
1157 ):
1144 results[nf] = None
1158 results[nf] = None
1145 # Report ignored items in the dmap as long as they are not
1159 # Report ignored items in the dmap as long as they are not
1146 # under a symlink directory.
1160 # under a symlink directory.
1147 elif audit_path.check(nf):
1161 elif audit_path.check(nf):
1148 try:
1162 try:
1149 results[nf] = lstat(join(nf))
1163 results[nf] = lstat(join(nf))
1150 # file was just ignored, no links, and exists
1164 # file was just ignored, no links, and exists
1151 except OSError:
1165 except OSError:
1152 # file doesn't exist
1166 # file doesn't exist
1153 results[nf] = None
1167 results[nf] = None
1154 else:
1168 else:
1155 # It's either missing or under a symlink directory
1169 # It's either missing or under a symlink directory
1156 # which we in this case report as missing
1170 # which we in this case report as missing
1157 results[nf] = None
1171 results[nf] = None
1158 else:
1172 else:
1159 # We may not have walked the full directory tree above,
1173 # We may not have walked the full directory tree above,
1160 # so stat and check everything we missed.
1174 # so stat and check everything we missed.
1161 iv = iter(visit)
1175 iv = iter(visit)
1162 for st in util.statfiles([join(i) for i in visit]):
1176 for st in util.statfiles([join(i) for i in visit]):
1163 results[next(iv)] = st
1177 results[next(iv)] = st
1164 return results
1178 return results
1165
1179
1166 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1180 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1167 # Force Rayon (Rust parallelism library) to respect the number of
1181 # Force Rayon (Rust parallelism library) to respect the number of
1168 # workers. This is a temporary workaround until Rust code knows
1182 # workers. This is a temporary workaround until Rust code knows
1169 # how to read the config file.
1183 # how to read the config file.
1170 numcpus = self._ui.configint(b"worker", b"numcpus")
1184 numcpus = self._ui.configint(b"worker", b"numcpus")
1171 if numcpus is not None:
1185 if numcpus is not None:
1172 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1186 encoding.environ.setdefault(b'RAYON_NUM_THREADS', b'%d' % numcpus)
1173
1187
1174 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1188 workers_enabled = self._ui.configbool(b"worker", b"enabled", True)
1175 if not workers_enabled:
1189 if not workers_enabled:
1176 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1190 encoding.environ[b"RAYON_NUM_THREADS"] = b"1"
1177
1191
1178 (
1192 (
1179 lookup,
1193 lookup,
1180 modified,
1194 modified,
1181 added,
1195 added,
1182 removed,
1196 removed,
1183 deleted,
1197 deleted,
1184 clean,
1198 clean,
1185 ignored,
1199 ignored,
1186 unknown,
1200 unknown,
1187 warnings,
1201 warnings,
1188 bad,
1202 bad,
1189 traversed,
1203 traversed,
1190 dirty,
1204 dirty,
1191 ) = rustmod.status(
1205 ) = rustmod.status(
1192 self._map._map,
1206 self._map._map,
1193 matcher,
1207 matcher,
1194 self._rootdir,
1208 self._rootdir,
1195 self._ignorefiles(),
1209 self._ignorefiles(),
1196 self._checkexec,
1210 self._checkexec,
1197 bool(list_clean),
1211 bool(list_clean),
1198 bool(list_ignored),
1212 bool(list_ignored),
1199 bool(list_unknown),
1213 bool(list_unknown),
1200 bool(matcher.traversedir),
1214 bool(matcher.traversedir),
1201 )
1215 )
1202
1216
1203 self._dirty |= dirty
1217 self._dirty |= dirty
1204
1218
1205 if matcher.traversedir:
1219 if matcher.traversedir:
1206 for dir in traversed:
1220 for dir in traversed:
1207 matcher.traversedir(dir)
1221 matcher.traversedir(dir)
1208
1222
1209 if self._ui.warn:
1223 if self._ui.warn:
1210 for item in warnings:
1224 for item in warnings:
1211 if isinstance(item, tuple):
1225 if isinstance(item, tuple):
1212 file_path, syntax = item
1226 file_path, syntax = item
1213 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1227 msg = _(b"%s: ignoring invalid syntax '%s'\n") % (
1214 file_path,
1228 file_path,
1215 syntax,
1229 syntax,
1216 )
1230 )
1217 self._ui.warn(msg)
1231 self._ui.warn(msg)
1218 else:
1232 else:
1219 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1233 msg = _(b"skipping unreadable pattern file '%s': %s\n")
1220 self._ui.warn(
1234 self._ui.warn(
1221 msg
1235 msg
1222 % (
1236 % (
1223 pathutil.canonpath(
1237 pathutil.canonpath(
1224 self._rootdir, self._rootdir, item
1238 self._rootdir, self._rootdir, item
1225 ),
1239 ),
1226 b"No such file or directory",
1240 b"No such file or directory",
1227 )
1241 )
1228 )
1242 )
1229
1243
1230 for (fn, message) in bad:
1244 for (fn, message) in bad:
1231 matcher.bad(fn, encoding.strtolocal(message))
1245 matcher.bad(fn, encoding.strtolocal(message))
1232
1246
1233 status = scmutil.status(
1247 status = scmutil.status(
1234 modified=modified,
1248 modified=modified,
1235 added=added,
1249 added=added,
1236 removed=removed,
1250 removed=removed,
1237 deleted=deleted,
1251 deleted=deleted,
1238 unknown=unknown,
1252 unknown=unknown,
1239 ignored=ignored,
1253 ignored=ignored,
1240 clean=clean,
1254 clean=clean,
1241 )
1255 )
1242 return (lookup, status)
1256 return (lookup, status)
1243
1257
def status(self, match, subrepos, ignored, clean, unknown):
    """Determine the status of the working copy relative to the
    dirstate and return a pair of (unsure, status), where status is of
    type scmutil.status and:

    unsure:
        files that might have been modified since the dirstate was
        written, but need to be read to be sure (size is the same
        but mtime differs)
    status.modified:
        files that have definitely been modified since the dirstate
        was written (different size or mode)
    status.clean:
        files that have definitely not been modified since the
        dirstate was written
    """
    listignored, listclean, listunknown = ignored, clean, unknown
    lookup, modified, added, unknown, ignored = [], [], [], [], []
    removed, deleted, clean = [], [], []

    dmap = self._map
    dmap.preload()

    # Matchers the Rust status implementation knows how to handle.
    allowed_matchers = (
        matchmod.alwaysmatcher,
        matchmod.exactmatcher,
        matchmod.includematcher,
        matchmod.intersectionmatcher,
        matchmod.nevermatcher,
        matchmod.unionmatcher,
    )

    # The Rust fast path is only usable when the extension is available
    # and none of the not-yet-supported features are in play:
    # case-insensitive filesystems, subrepos, sparse checkouts, and
    # matcher classes outside ``allowed_matchers``.
    use_rust = (
        rustmod is not None
        and not self._checkcase
        and not subrepos
        and self._sparsematchfn is None
        and isinstance(match, allowed_matchers)
    )

    # Get the time from the filesystem so we can disambiguate files that
    # appear modified in the present or future.
    try:
        mtime_boundary = timestamp.get_fs_now(self._opener)
    except OSError:
        # In largefiles or readonly context
        mtime_boundary = None

    if use_rust:
        try:
            res = self._rust_status(
                match, listclean, listignored, listunknown
            )
            return res + (mtime_boundary,)
        except rustmod.FallbackError:
            # Rust declined this case; fall through to the Python path.
            pass

    def noop(f):
        pass

    # Bind the hot-loop callables to locals once (this loop visits every
    # file in the working directory).
    dcontains = dmap.__contains__
    dget = dmap.__getitem__
    ladd = lookup.append  # aka "unsure"
    madd = modified.append
    aadd = added.append
    uadd = unknown.append if listunknown else noop
    iadd = ignored.append if listignored else noop
    radd = removed.append
    dadd = deleted.append
    cadd = clean.append if listclean else noop
    mexact = match.exact
    dirignore = self._dirignore
    checkexec = self._checkexec
    checklink = self._checklink
    copymap = self._map.copymap

    # We need to do full walks when either
    # - we're listing all clean files, or
    # - match.traversedir does something, because match.traversedir should
    #   be called for every dir in the working dir
    full = listclean or match.traversedir is not None
    walk_results = self.walk(
        match, subrepos, listunknown, listignored, full=full
    )
    for fn, st in walk_results.items():
        if not dcontains(fn):
            # Not tracked: classify as ignored or unknown.
            if (listignored or mexact(fn)) and dirignore(fn):
                if listignored:
                    iadd(fn)
            else:
                uadd(fn)
            continue

        t = dget(fn)
        mode = t.mode
        size = t.size

        if not st and t.tracked:
            # Tracked but gone from disk.
            dadd(fn)
        elif t.p2_info:
            madd(fn)
        elif t.added:
            aadd(fn)
        elif t.removed:
            radd(fn)
        elif t.tracked:
            if not checklink and t.has_fallback_symlink:
                # If the file system does not support symlink, the mode
                # might not be correctly stored in the dirstate, so do not
                # trust it.
                ladd(fn)
            elif not checkexec and t.has_fallback_exec:
                # If the file system does not support exec bits, the mode
                # might not be correctly stored in the dirstate, so do not
                # trust it.
                ladd(fn)
            elif (
                size >= 0
                and (
                    (size != st.st_size and size != st.st_size & _rangemask)
                    or ((mode ^ st.st_mode) & 0o100 and checkexec)
                )
                or fn in copymap
            ):
                if stat.S_ISLNK(st.st_mode) and size != st.st_size:
                    # issue6456: Size returned may be longer due to
                    # encryption on EXT-4 fscrypt, undecided.
                    ladd(fn)
                else:
                    madd(fn)
            elif not t.mtime_likely_equal_to(timestamp.mtime_of(st)):
                # There might be a change in the future if for example the
                # internal clock is off, but this is a case where the issues
                # the user would face would be a lot worse and there is
                # nothing we can really do.
                ladd(fn)
            elif listclean:
                cadd(fn)
    status = scmutil.status(
        modified, added, removed, deleted, unknown, ignored, clean
    )
    return (lookup, status, mtime_boundary)
1392
1406
def matches(self, match):
    """Return files in the dirstate (in whatever state) filtered by match."""
    mapping = self._map
    if rustmod is not None:
        # The Rust dirstate map is wrapped one level deeper.
        mapping = self._map._map

    if match.always():
        return mapping.keys()
    patterns = match.files()
    if match.isexact():
        # fast path -- filter the other way around, since typically the
        # pattern list is much smaller than the dirstate map
        return [f for f in patterns if f in mapping]
    if match.prefix() and all(fn in mapping for fn in patterns):
        # fast path -- all the values are known to be files, so just
        # return that
        return list(patterns)
    return [f for f in mapping if match(f)]
1413
1427
1414 def _actualfilename(self, tr):
1428 def _actualfilename(self, tr):
1415 if tr:
1429 if tr:
1416 return self._pendingfilename
1430 return self._pendingfilename
1417 else:
1431 else:
1418 return self._filename
1432 return self._filename
1419
1433
def savebackup(self, tr, backupname):
    '''Save current dirstate into backup file'''
    vfs = self._opener
    filename = self._actualfilename(tr)
    assert backupname != filename

    # use '_writedirstate' instead of 'write' to write changes certainly,
    # because the latter omits writing out if transaction is running.
    # output file will be used to create backup of dirstate at this point.
    if self._dirty or not vfs.exists(filename):
        self._writedirstate(
            tr,
            vfs(filename, b"w", atomictemp=True, checkambig=True),
        )

    if tr:
        # ensure that subsequent tr.writepending returns True for
        # changes written out above, even if dirstate is never
        # changed after this
        tr.addfilegenerator(
            b'dirstate-1-main',
            (self._filename,),
            lambda f: self._writedirstate(tr, f),
            location=b'plain',
            post_finalize=True,
        )

        # ensure that pending file written above is unlinked at
        # failure, even if tr.writepending isn't invoked until the
        # end of this transaction
        tr.registertmp(filename, location=b'plain')

    vfs.tryunlink(backupname)
    # hardlink backup is okay because _writedirstate is always called
    # with an "atomictemp=True" file.
    util.copyfile(
        vfs.join(filename),
        vfs.join(backupname),
        hardlink=True,
    )
1459
1473
def restorebackup(self, tr, backupname):
    '''Restore dirstate by backup file'''
    # this "invalidate()" prevents "wlock.release()" from writing
    # changes of dirstate out after restoring from backup file
    self.invalidate()
    filename = self._actualfilename(tr)
    vfs = self._opener
    if not util.samefile(vfs.join(backupname), vfs.join(filename)):
        vfs.rename(backupname, filename, checkambig=True)
    else:
        # backup and target already are the same file: just drop the
        # spare directory entry
        vfs.unlink(backupname)
1471
1485
def clearbackup(self, tr, backupname):
    """Remove the dirstate backup file ``backupname``.

    ``tr`` is accepted for interface symmetry with ``savebackup`` and
    ``restorebackup`` but is not used here.
    """
    self._opener.unlink(backupname)
1475
1489
def verify(self, m1, m2):
    """Check the dirstate content against the parent manifests and
    yield a (message-format, filename, state) tuple for every
    inconsistency found."""
    msg_missing_p1 = b"%s in state %s, but not in manifest1\n"
    msg_unexpected_p1 = b"%s in state %s, but also in manifest1\n"
    msg_missing_both = b"%s in state %s, but not in either manifest\n"
    msg_missing_ds = b"%s in manifest1, but listed as state %s\n"
    # Direction one: every dirstate entry must be justified by a parent.
    for filename, entry in self.items():
        state = entry.state
        in_p1 = filename in m1
        if state in b"nr" and not in_p1:
            yield (msg_missing_p1, filename, state)
        if state in b"a" and in_p1:
            yield (msg_unexpected_p1, filename, state)
        if state in b"m" and not in_p1 and filename not in m2:
            yield (msg_missing_both, filename, state)
    # Direction two: every file in p1 must be tracked by the dirstate.
    for filename in m1:
        state = self.get_entry(filename).state
        if state not in b"nrm":
            yield (msg_missing_ds, filename, state)
General Comments 0
You need to be logged in to leave comments. Login now