Show More
@@ -1,454 +1,459 b'' | |||||
1 | # sparse.py - allow sparse checkouts of the working directory |
|
1 | # sparse.py - allow sparse checkouts of the working directory | |
2 | # |
|
2 | # | |
3 | # Copyright 2014 Facebook, Inc. |
|
3 | # Copyright 2014 Facebook, Inc. | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | """allow sparse checkouts of the working directory (EXPERIMENTAL) |
|
8 | """allow sparse checkouts of the working directory (EXPERIMENTAL) | |
9 |
|
9 | |||
10 | (This extension is not yet protected by backwards compatibility |
|
10 | (This extension is not yet protected by backwards compatibility | |
11 | guarantees. Any aspect may break in future releases until this |
|
11 | guarantees. Any aspect may break in future releases until this | |
12 | notice is removed.) |
|
12 | notice is removed.) | |
13 |
|
13 | |||
14 | This extension allows the working directory to only consist of a |
|
14 | This extension allows the working directory to only consist of a | |
15 | subset of files for the revision. This allows specific files or |
|
15 | subset of files for the revision. This allows specific files or | |
16 | directories to be explicitly included or excluded. Many repository |
|
16 | directories to be explicitly included or excluded. Many repository | |
17 | operations have performance proportional to the number of files in |
|
17 | operations have performance proportional to the number of files in | |
18 | the working directory. So only realizing a subset of files in the |
|
18 | the working directory. So only realizing a subset of files in the | |
19 | working directory can improve performance. |
|
19 | working directory can improve performance. | |
20 |
|
20 | |||
21 | Sparse Config Files |
|
21 | Sparse Config Files | |
22 | ------------------- |
|
22 | ------------------- | |
23 |
|
23 | |||
24 | The set of files that are part of a sparse checkout are defined by |
|
24 | The set of files that are part of a sparse checkout are defined by | |
25 | a sparse config file. The file defines 3 things: includes (files to |
|
25 | a sparse config file. The file defines 3 things: includes (files to | |
26 | include in the sparse checkout), excludes (files to exclude from the |
|
26 | include in the sparse checkout), excludes (files to exclude from the | |
27 | sparse checkout), and profiles (links to other config files). |
|
27 | sparse checkout), and profiles (links to other config files). | |
28 |
|
28 | |||
29 | The file format is newline delimited. Empty lines and lines beginning |
|
29 | The file format is newline delimited. Empty lines and lines beginning | |
30 | with ``#`` are ignored. |
|
30 | with ``#`` are ignored. | |
31 |
|
31 | |||
32 | Lines beginning with ``%include `` denote another sparse config file |
|
32 | Lines beginning with ``%include `` denote another sparse config file | |
33 | to include. e.g. ``%include tests.sparse``. The filename is relative |
|
33 | to include. e.g. ``%include tests.sparse``. The filename is relative | |
34 | to the repository root. |
|
34 | to the repository root. | |
35 |
|
35 | |||
36 | The special lines ``[include]`` and ``[exclude]`` denote the section |
|
36 | The special lines ``[include]`` and ``[exclude]`` denote the section | |
37 | for includes and excludes that follow, respectively. It is illegal to |
|
37 | for includes and excludes that follow, respectively. It is illegal to | |
38 | have ``[include]`` after ``[exclude]``. |
|
38 | have ``[include]`` after ``[exclude]``. | |
39 |
|
39 | |||
40 | Non-special lines resemble file patterns to be added to either includes |
|
40 | Non-special lines resemble file patterns to be added to either includes | |
41 | or excludes. The syntax of these lines is documented by :hg:`help patterns`. |
|
41 | or excludes. The syntax of these lines is documented by :hg:`help patterns`. | |
42 | Patterns are interpreted as ``glob:`` by default and match against the |
|
42 | Patterns are interpreted as ``glob:`` by default and match against the | |
43 | root of the repository. |
|
43 | root of the repository. | |
44 |
|
44 | |||
45 | Exclusion patterns take precedence over inclusion patterns. So even |
|
45 | Exclusion patterns take precedence over inclusion patterns. So even | |
46 | if a file is explicitly included, an ``[exclude]`` entry can remove it. |
|
46 | if a file is explicitly included, an ``[exclude]`` entry can remove it. | |
47 |
|
47 | |||
48 | For example, say you have a repository with 3 directories, ``frontend/``, |
|
48 | For example, say you have a repository with 3 directories, ``frontend/``, | |
49 | ``backend/``, and ``tools/``. ``frontend/`` and ``backend/`` correspond |
|
49 | ``backend/``, and ``tools/``. ``frontend/`` and ``backend/`` correspond | |
50 | to different projects and it is uncommon for someone working on one |
|
50 | to different projects and it is uncommon for someone working on one | |
51 | to need the files for the other. But ``tools/`` contains files shared |
|
51 | to need the files for the other. But ``tools/`` contains files shared | |
52 | between both projects. Your sparse config files may resemble:: |
|
52 | between both projects. Your sparse config files may resemble:: | |
53 |
|
53 | |||
54 | # frontend.sparse |
|
54 | # frontend.sparse | |
55 | frontend/** |
|
55 | frontend/** | |
56 | tools/** |
|
56 | tools/** | |
57 |
|
57 | |||
58 | # backend.sparse |
|
58 | # backend.sparse | |
59 | backend/** |
|
59 | backend/** | |
60 | tools/** |
|
60 | tools/** | |
61 |
|
61 | |||
62 | Say the backend grows in size. Or there's a directory with thousands |
|
62 | Say the backend grows in size. Or there's a directory with thousands | |
63 | of files you wish to exclude. You can modify the profile to exclude |
|
63 | of files you wish to exclude. You can modify the profile to exclude | |
64 | certain files:: |
|
64 | certain files:: | |
65 |
|
65 | |||
66 | [include] |
|
66 | [include] | |
67 | backend/** |
|
67 | backend/** | |
68 | tools/** |
|
68 | tools/** | |
69 |
|
69 | |||
70 | [exclude] |
|
70 | [exclude] | |
71 | tools/tests/** |
|
71 | tools/tests/** | |
72 | """ |
|
72 | """ | |
73 |
|
73 | |||
74 |
|
74 | |||
75 | from mercurial.i18n import _ |
|
75 | from mercurial.i18n import _ | |
76 | from mercurial.pycompat import setattr |
|
76 | from mercurial.pycompat import setattr | |
77 | from mercurial import ( |
|
77 | from mercurial import ( | |
78 | cmdutil, |
|
78 | cmdutil, | |
79 | commands, |
|
79 | commands, | |
80 | dirstate, |
|
80 | dirstate, | |
81 | error, |
|
81 | error, | |
82 | extensions, |
|
82 | extensions, | |
83 | logcmdutil, |
|
83 | logcmdutil, | |
84 | match as matchmod, |
|
84 | match as matchmod, | |
85 | merge as mergemod, |
|
85 | merge as mergemod, | |
86 | pycompat, |
|
86 | pycompat, | |
87 | registrar, |
|
87 | registrar, | |
88 | sparse, |
|
88 | sparse, | |
89 | util, |
|
89 | util, | |
90 | ) |
|
90 | ) | |
91 |
|
91 | |||
92 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for |
|
92 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for | |
93 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should |
|
93 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should | |
94 | # be specifying the version(s) of Mercurial they are tested with, or |
|
94 | # be specifying the version(s) of Mercurial they are tested with, or | |
95 | # leave the attribute unspecified. |
|
95 | # leave the attribute unspecified. | |
96 | testedwith = b'ships-with-hg-core' |
|
96 | testedwith = b'ships-with-hg-core' | |
97 |
|
97 | |||
98 | cmdtable = {} |
|
98 | cmdtable = {} | |
99 | command = registrar.command(cmdtable) |
|
99 | command = registrar.command(cmdtable) | |
100 |
|
100 | |||
101 |
|
101 | |||
102 | def extsetup(ui): |
|
102 | def extsetup(ui): | |
103 | sparse.enabled = True |
|
103 | sparse.enabled = True | |
104 |
|
104 | |||
105 | _setupclone(ui) |
|
105 | _setupclone(ui) | |
106 | _setuplog(ui) |
|
106 | _setuplog(ui) | |
107 | _setupadd(ui) |
|
107 | _setupadd(ui) | |
108 | _setupdirstate(ui) |
|
108 | _setupdirstate(ui) | |
109 |
|
109 | |||
110 |
|
110 | |||
111 | def replacefilecache(cls, propname, replacement): |
|
111 | def replacefilecache(cls, propname, replacement): | |
112 | """Replace a filecache property with a new class. This allows changing the |
|
112 | """Replace a filecache property with a new class. This allows changing the | |
113 | cache invalidation condition.""" |
|
113 | cache invalidation condition.""" | |
114 | origcls = cls |
|
114 | origcls = cls | |
115 | assert callable(replacement) |
|
115 | assert callable(replacement) | |
116 | while cls is not object: |
|
116 | while cls is not object: | |
117 | if propname in cls.__dict__: |
|
117 | if propname in cls.__dict__: | |
118 | orig = cls.__dict__[propname] |
|
118 | orig = cls.__dict__[propname] | |
119 | setattr(cls, propname, replacement(orig)) |
|
119 | setattr(cls, propname, replacement(orig)) | |
120 | break |
|
120 | break | |
121 | cls = cls.__bases__[0] |
|
121 | cls = cls.__bases__[0] | |
122 |
|
122 | |||
123 | if cls is object: |
|
123 | if cls is object: | |
124 | raise AttributeError( |
|
124 | raise AttributeError( | |
125 | _(b"type '%s' has no property '%s'") % (origcls, propname) |
|
125 | _(b"type '%s' has no property '%s'") % (origcls, propname) | |
126 | ) |
|
126 | ) | |
127 |
|
127 | |||
128 |
|
128 | |||
129 | def _setuplog(ui): |
|
129 | def _setuplog(ui): | |
130 | entry = commands.table[b'log|history'] |
|
130 | entry = commands.table[b'log|history'] | |
131 | entry[1].append( |
|
131 | entry[1].append( | |
132 | ( |
|
132 | ( | |
133 | b'', |
|
133 | b'', | |
134 | b'sparse', |
|
134 | b'sparse', | |
135 | None, |
|
135 | None, | |
136 | b"limit to changesets affecting the sparse checkout", |
|
136 | b"limit to changesets affecting the sparse checkout", | |
137 | ) |
|
137 | ) | |
138 | ) |
|
138 | ) | |
139 |
|
139 | |||
140 | def _initialrevs(orig, repo, wopts): |
|
140 | def _initialrevs(orig, repo, wopts): | |
141 | revs = orig(repo, wopts) |
|
141 | revs = orig(repo, wopts) | |
142 | if wopts.opts.get(b'sparse'): |
|
142 | if wopts.opts.get(b'sparse'): | |
143 | sparsematch = sparse.matcher(repo) |
|
143 | sparsematch = sparse.matcher(repo) | |
144 |
|
144 | |||
145 | def ctxmatch(rev): |
|
145 | def ctxmatch(rev): | |
146 | ctx = repo[rev] |
|
146 | ctx = repo[rev] | |
147 | return any(f for f in ctx.files() if sparsematch(f)) |
|
147 | return any(f for f in ctx.files() if sparsematch(f)) | |
148 |
|
148 | |||
149 | revs = revs.filter(ctxmatch) |
|
149 | revs = revs.filter(ctxmatch) | |
150 | return revs |
|
150 | return revs | |
151 |
|
151 | |||
152 | extensions.wrapfunction(logcmdutil, b'_initialrevs', _initialrevs) |
|
152 | extensions.wrapfunction(logcmdutil, b'_initialrevs', _initialrevs) | |
153 |
|
153 | |||
154 |
|
154 | |||
155 | def _clonesparsecmd(orig, ui, repo, *args, **opts): |
|
155 | def _clonesparsecmd(orig, ui, repo, *args, **opts): | |
156 | include = opts.get('include') |
|
156 | include = opts.get('include') | |
157 | exclude = opts.get('exclude') |
|
157 | exclude = opts.get('exclude') | |
158 | enableprofile = opts.get('enable_profile') |
|
158 | enableprofile = opts.get('enable_profile') | |
159 | narrow_pat = opts.get('narrow') |
|
159 | narrow_pat = opts.get('narrow') | |
160 |
|
160 | |||
161 | # if --narrow is passed, it means they are includes and excludes for narrow |
|
161 | # if --narrow is passed, it means they are includes and excludes for narrow | |
162 | # clone |
|
162 | # clone | |
163 | if not narrow_pat and (include or exclude or enableprofile): |
|
163 | if not narrow_pat and (include or exclude or enableprofile): | |
164 |
|
164 | |||
165 | def clonesparse(orig, ctx, *args, **kwargs): |
|
165 | def clonesparse(orig, ctx, *args, **kwargs): | |
166 | sparse.updateconfig( |
|
166 | sparse.updateconfig( | |
167 | ctx.repo().unfiltered(), |
|
167 | ctx.repo().unfiltered(), | |
168 | {}, |
|
168 | {}, | |
169 | include=include, |
|
169 | include=include, | |
170 | exclude=exclude, |
|
170 | exclude=exclude, | |
171 | enableprofile=enableprofile, |
|
171 | enableprofile=enableprofile, | |
172 | usereporootpaths=True, |
|
172 | usereporootpaths=True, | |
173 | ) |
|
173 | ) | |
174 | return orig(ctx, *args, **kwargs) |
|
174 | return orig(ctx, *args, **kwargs) | |
175 |
|
175 | |||
176 | extensions.wrapfunction(mergemod, b'update', clonesparse) |
|
176 | extensions.wrapfunction(mergemod, b'update', clonesparse) | |
177 | return orig(ui, repo, *args, **opts) |
|
177 | return orig(ui, repo, *args, **opts) | |
178 |
|
178 | |||
179 |
|
179 | |||
180 | def _setupclone(ui): |
|
180 | def _setupclone(ui): | |
181 | entry = commands.table[b'clone'] |
|
181 | entry = commands.table[b'clone'] | |
182 | entry[1].append((b'', b'enable-profile', [], b'enable a sparse profile')) |
|
182 | entry[1].append((b'', b'enable-profile', [], b'enable a sparse profile')) | |
183 | entry[1].append((b'', b'include', [], b'include sparse pattern')) |
|
183 | entry[1].append((b'', b'include', [], b'include sparse pattern')) | |
184 | entry[1].append((b'', b'exclude', [], b'exclude sparse pattern')) |
|
184 | entry[1].append((b'', b'exclude', [], b'exclude sparse pattern')) | |
185 | extensions.wrapcommand(commands.table, b'clone', _clonesparsecmd) |
|
185 | extensions.wrapcommand(commands.table, b'clone', _clonesparsecmd) | |
186 |
|
186 | |||
187 |
|
187 | |||
188 | def _setupadd(ui): |
|
188 | def _setupadd(ui): | |
189 | entry = commands.table[b'add'] |
|
189 | entry = commands.table[b'add'] | |
190 | entry[1].append( |
|
190 | entry[1].append( | |
191 | ( |
|
191 | ( | |
192 | b's', |
|
192 | b's', | |
193 | b'sparse', |
|
193 | b'sparse', | |
194 | None, |
|
194 | None, | |
195 | b'also include directories of added files in sparse config', |
|
195 | b'also include directories of added files in sparse config', | |
196 | ) |
|
196 | ) | |
197 | ) |
|
197 | ) | |
198 |
|
198 | |||
199 | def _add(orig, ui, repo, *pats, **opts): |
|
199 | def _add(orig, ui, repo, *pats, **opts): | |
200 | if opts.get('sparse'): |
|
200 | if opts.get('sparse'): | |
201 | dirs = set() |
|
201 | dirs = set() | |
202 | for pat in pats: |
|
202 | for pat in pats: | |
203 | dirname, basename = util.split(pat) |
|
203 | dirname, basename = util.split(pat) | |
204 | dirs.add(dirname) |
|
204 | dirs.add(dirname) | |
205 | sparse.updateconfig(repo, opts, include=list(dirs)) |
|
205 | sparse.updateconfig(repo, opts, include=list(dirs)) | |
206 | return orig(ui, repo, *pats, **opts) |
|
206 | return orig(ui, repo, *pats, **opts) | |
207 |
|
207 | |||
208 | extensions.wrapcommand(commands.table, b'add', _add) |
|
208 | extensions.wrapcommand(commands.table, b'add', _add) | |
209 |
|
209 | |||
210 |
|
210 | |||
211 | def _setupdirstate(ui): |
|
211 | def _setupdirstate(ui): | |
212 | """Modify the dirstate to prevent stat'ing excluded files, |
|
212 | """Modify the dirstate to prevent stat'ing excluded files, | |
213 | and to prevent modifications to files outside the checkout. |
|
213 | and to prevent modifications to files outside the checkout. | |
214 | """ |
|
214 | """ | |
215 |
|
215 | |||
216 | def walk(orig, self, match, subrepos, unknown, ignored, full=True): |
|
216 | def walk(orig, self, match, subrepos, unknown, ignored, full=True): | |
217 | # hack to not exclude explicitly-specified paths so that they can |
|
217 | # hack to not exclude explicitly-specified paths so that they can | |
218 | # be warned later on e.g. dirstate.add() |
|
218 | # be warned later on e.g. dirstate.add() | |
219 | em = matchmod.exact(match.files()) |
|
219 | em = matchmod.exact(match.files()) | |
220 | sm = matchmod.unionmatcher([self._sparsematcher, em]) |
|
220 | sm = matchmod.unionmatcher([self._sparsematcher, em]) | |
221 | match = matchmod.intersectmatchers(match, sm) |
|
221 | match = matchmod.intersectmatchers(match, sm) | |
222 | return orig(self, match, subrepos, unknown, ignored, full) |
|
222 | return orig(self, match, subrepos, unknown, ignored, full) | |
223 |
|
223 | |||
224 | extensions.wrapfunction(dirstate.dirstate, b'walk', walk) |
|
224 | extensions.wrapfunction(dirstate.dirstate, b'walk', walk) | |
225 |
|
225 | |||
226 | # dirstate.rebuild should not add non-matching files |
|
226 | # dirstate.rebuild should not add non-matching files | |
227 | def _rebuild(orig, self, parent, allfiles, changedfiles=None): |
|
227 | def _rebuild(orig, self, parent, allfiles, changedfiles=None): | |
228 | matcher = self._sparsematcher |
|
228 | matcher = self._sparsematcher | |
229 | if not matcher.always(): |
|
229 | if not matcher.always(): | |
230 | allfiles = [f for f in allfiles if matcher(f)] |
|
230 | allfiles = [f for f in allfiles if matcher(f)] | |
231 | if changedfiles: |
|
231 | if changedfiles: | |
232 | changedfiles = [f for f in changedfiles if matcher(f)] |
|
232 | changedfiles = [f for f in changedfiles if matcher(f)] | |
233 |
|
233 | |||
234 | if changedfiles is not None: |
|
234 | if changedfiles is not None: | |
235 | # In _rebuild, these files will be deleted from the dirstate |
|
235 | # In _rebuild, these files will be deleted from the dirstate | |
236 | # when they are not found to be in allfiles |
|
236 | # when they are not found to be in allfiles | |
237 | dirstatefilestoremove = {f for f in self if not matcher(f)} |
|
237 | dirstatefilestoremove = {f for f in self if not matcher(f)} | |
238 | changedfiles = dirstatefilestoremove.union(changedfiles) |
|
238 | changedfiles = dirstatefilestoremove.union(changedfiles) | |
239 |
|
239 | |||
240 | return orig(self, parent, allfiles, changedfiles) |
|
240 | return orig(self, parent, allfiles, changedfiles) | |
241 |
|
241 | |||
242 | extensions.wrapfunction(dirstate.dirstate, b'rebuild', _rebuild) |
|
242 | extensions.wrapfunction(dirstate.dirstate, b'rebuild', _rebuild) | |
243 |
|
243 | |||
244 | # Prevent adding files that are outside the sparse checkout |
|
244 | # Prevent adding files that are outside the sparse checkout | |
245 | editfuncs = [ |
|
245 | editfuncs = [ | |
246 | b'set_tracked', |
|
246 | b'set_tracked', | |
247 | b'set_untracked', |
|
247 | b'set_untracked', | |
248 | b'copy', |
|
248 | b'copy', | |
249 | ] |
|
249 | ] | |
250 | hint = _( |
|
250 | hint = _( | |
251 | b'include file with `hg debugsparse --include <pattern>` or use ' |
|
251 | b'include file with `hg debugsparse --include <pattern>` or use ' | |
252 | + b'`hg add -s <file>` to include file directory while adding' |
|
252 | + b'`hg add -s <file>` to include file directory while adding' | |
253 | ) |
|
253 | ) | |
254 | for func in editfuncs: |
|
254 | for func in editfuncs: | |
255 |
|
255 | |||
256 | def _wrapper(orig, self, *args, **kwargs): |
|
256 | def _wrapper(orig, self, *args, **kwargs): | |
257 | sparsematch = self._sparsematcher |
|
257 | sparsematch = self._sparsematcher | |
258 | if not sparsematch.always(): |
|
258 | if not sparsematch.always(): | |
259 | for f in args: |
|
259 | for f in args: | |
260 | if f is not None and not sparsematch(f) and f not in self: |
|
260 | if f is not None and not sparsematch(f) and f not in self: | |
261 | raise error.Abort( |
|
261 | raise error.Abort( | |
262 | _( |
|
262 | _( | |
263 | b"cannot add '%s' - it is outside " |
|
263 | b"cannot add '%s' - it is outside " | |
264 | b"the sparse checkout" |
|
264 | b"the sparse checkout" | |
265 | ) |
|
265 | ) | |
266 | % f, |
|
266 | % f, | |
267 | hint=hint, |
|
267 | hint=hint, | |
268 | ) |
|
268 | ) | |
269 | return orig(self, *args, **kwargs) |
|
269 | return orig(self, *args, **kwargs) | |
270 |
|
270 | |||
271 | extensions.wrapfunction(dirstate.dirstate, func, _wrapper) |
|
271 | extensions.wrapfunction(dirstate.dirstate, func, _wrapper) | |
272 |
|
272 | |||
273 |
|
273 | |||
274 | @command( |
|
274 | @command( | |
275 | b'debugsparse', |
|
275 | b'debugsparse', | |
276 | [ |
|
276 | [ | |
277 | ( |
|
277 | ( | |
278 | b'I', |
|
278 | b'I', | |
279 | b'include', |
|
279 | b'include', | |
280 | [], |
|
280 | [], | |
281 | _(b'include files in the sparse checkout'), |
|
281 | _(b'include files in the sparse checkout'), | |
282 | _(b'PATTERN'), |
|
282 | _(b'PATTERN'), | |
283 | ), |
|
283 | ), | |
284 | ( |
|
284 | ( | |
285 | b'X', |
|
285 | b'X', | |
286 | b'exclude', |
|
286 | b'exclude', | |
287 | [], |
|
287 | [], | |
288 | _(b'exclude files in the sparse checkout'), |
|
288 | _(b'exclude files in the sparse checkout'), | |
289 | _(b'PATTERN'), |
|
289 | _(b'PATTERN'), | |
290 | ), |
|
290 | ), | |
291 | ( |
|
291 | ( | |
292 | b'd', |
|
292 | b'd', | |
293 | b'delete', |
|
293 | b'delete', | |
294 | [], |
|
294 | [], | |
295 | _(b'delete an include/exclude rule'), |
|
295 | _(b'delete an include/exclude rule'), | |
296 | _(b'PATTERN'), |
|
296 | _(b'PATTERN'), | |
297 | ), |
|
297 | ), | |
298 | ( |
|
298 | ( | |
299 | b'f', |
|
299 | b'f', | |
300 | b'force', |
|
300 | b'force', | |
301 | False, |
|
301 | False, | |
302 | _(b'allow changing rules even with pending changes'), |
|
302 | _(b'allow changing rules even with pending changes'), | |
303 | ), |
|
303 | ), | |
304 | ( |
|
304 | ( | |
305 | b'', |
|
305 | b'', | |
306 | b'enable-profile', |
|
306 | b'enable-profile', | |
307 | [], |
|
307 | [], | |
308 | _(b'enables the specified profile'), |
|
308 | _(b'enables the specified profile'), | |
309 | _(b'PATTERN'), |
|
309 | _(b'PATTERN'), | |
310 | ), |
|
310 | ), | |
311 | ( |
|
311 | ( | |
312 | b'', |
|
312 | b'', | |
313 | b'disable-profile', |
|
313 | b'disable-profile', | |
314 | [], |
|
314 | [], | |
315 | _(b'disables the specified profile'), |
|
315 | _(b'disables the specified profile'), | |
316 | _(b'PATTERN'), |
|
316 | _(b'PATTERN'), | |
317 | ), |
|
317 | ), | |
318 | ( |
|
318 | ( | |
319 | b'', |
|
319 | b'', | |
320 | b'import-rules', |
|
320 | b'import-rules', | |
321 | [], |
|
321 | [], | |
322 | _(b'imports rules from a file'), |
|
322 | _(b'imports rules from a file'), | |
323 | _(b'PATTERN'), |
|
323 | _(b'PATTERN'), | |
324 | ), |
|
324 | ), | |
325 | (b'', b'clear-rules', False, _(b'clears local include/exclude rules')), |
|
325 | (b'', b'clear-rules', False, _(b'clears local include/exclude rules')), | |
326 | ( |
|
326 | ( | |
327 | b'', |
|
327 | b'', | |
328 | b'refresh', |
|
328 | b'refresh', | |
329 | False, |
|
329 | False, | |
330 | _(b'updates the working after sparseness changes'), |
|
330 | _(b'updates the working after sparseness changes'), | |
331 | ), |
|
331 | ), | |
332 | (b'', b'reset', False, _(b'makes the repo full again')), |
|
332 | (b'', b'reset', False, _(b'makes the repo full again')), | |
333 | ] |
|
333 | ] | |
334 | + commands.templateopts, |
|
334 | + commands.templateopts, | |
335 | _(b'[--OPTION]'), |
|
335 | _(b'[--OPTION]'), | |
336 | helpbasic=True, |
|
336 | helpbasic=True, | |
337 | ) |
|
337 | ) | |
338 | def debugsparse(ui, repo, **opts): |
|
338 | def debugsparse(ui, repo, **opts): | |
339 | """make the current checkout sparse, or edit the existing checkout |
|
339 | """make the current checkout sparse, or edit the existing checkout | |
340 |
|
340 | |||
341 | The sparse command is used to make the current checkout sparse. |
|
341 | The sparse command is used to make the current checkout sparse. | |
342 | This means files that don't meet the sparse condition will not be |
|
342 | This means files that don't meet the sparse condition will not be | |
343 | written to disk, or show up in any working copy operations. It does |
|
343 | written to disk, or show up in any working copy operations. It does | |
344 | not affect files in history in any way. |
|
344 | not affect files in history in any way. | |
345 |
|
345 | |||
346 | Passing no arguments prints the currently applied sparse rules. |
|
346 | Passing no arguments prints the currently applied sparse rules. | |
347 |
|
347 | |||
348 | --include and --exclude are used to add and remove files from the sparse |
|
348 | --include and --exclude are used to add and remove files from the sparse | |
349 | checkout. The effects of adding an include or exclude rule are applied |
|
349 | checkout. The effects of adding an include or exclude rule are applied | |
350 | immediately. If applying the new rule would cause a file with pending |
|
350 | immediately. If applying the new rule would cause a file with pending | |
351 | changes to be added or removed, the command will fail. Pass --force to |
|
351 | changes to be added or removed, the command will fail. Pass --force to | |
352 | force a rule change even with pending changes (the changes on disk will |
|
352 | force a rule change even with pending changes (the changes on disk will | |
353 | be preserved). |
|
353 | be preserved). | |
354 |
|
354 | |||
355 | --delete removes an existing include/exclude rule. The effects are |
|
355 | --delete removes an existing include/exclude rule. The effects are | |
356 | immediate. |
|
356 | immediate. | |
357 |
|
357 | |||
358 | --refresh refreshes the files on disk based on the sparse rules. This is |
|
358 | --refresh refreshes the files on disk based on the sparse rules. This is | |
359 | only necessary if .hg/sparse was changed by hand. |
|
359 | only necessary if .hg/sparse was changed by hand. | |
360 |
|
360 | |||
361 | --enable-profile and --disable-profile accept a path to a .hgsparse file. |
|
361 | --enable-profile and --disable-profile accept a path to a .hgsparse file. | |
362 | This allows defining sparse checkouts and tracking them inside the |
|
362 | This allows defining sparse checkouts and tracking them inside the | |
363 | repository. This is useful for defining commonly used sparse checkouts for |
|
363 | repository. This is useful for defining commonly used sparse checkouts for | |
364 | many people to use. As the profile definition changes over time, the sparse |
|
364 | many people to use. As the profile definition changes over time, the sparse | |
365 | checkout will automatically be updated appropriately, depending on which |
|
365 | checkout will automatically be updated appropriately, depending on which | |
366 | changeset is checked out. Changes to .hgsparse are not applied until they |
|
366 | changeset is checked out. Changes to .hgsparse are not applied until they | |
367 | have been committed. |
|
367 | have been committed. | |
368 |
|
368 | |||
369 | --import-rules accepts a path to a file containing rules in the .hgsparse |
|
369 | --import-rules accepts a path to a file containing rules in the .hgsparse | |
370 | format, allowing you to add --include, --exclude and --enable-profile rules |
|
370 | format, allowing you to add --include, --exclude and --enable-profile rules | |
371 | in bulk. Like the --include, --exclude and --enable-profile switches, the |
|
371 | in bulk. Like the --include, --exclude and --enable-profile switches, the | |
372 | changes are applied immediately. |
|
372 | changes are applied immediately. | |
373 |
|
373 | |||
374 | --clear-rules removes all local include and exclude rules, while leaving |
|
374 | --clear-rules removes all local include and exclude rules, while leaving | |
375 | any enabled profiles in place. |
|
375 | any enabled profiles in place. | |
376 |
|
376 | |||
377 | Returns 0 if editing the sparse checkout succeeds. |
|
377 | Returns 0 if editing the sparse checkout succeeds. | |
378 | """ |
|
378 | """ | |
379 | opts = pycompat.byteskwargs(opts) |
|
379 | opts = pycompat.byteskwargs(opts) | |
380 | include = opts.get(b'include') |
|
380 | include = opts.get(b'include') | |
381 | exclude = opts.get(b'exclude') |
|
381 | exclude = opts.get(b'exclude') | |
382 | force = opts.get(b'force') |
|
382 | force = opts.get(b'force') | |
383 | enableprofile = opts.get(b'enable_profile') |
|
383 | enableprofile = opts.get(b'enable_profile') | |
384 | disableprofile = opts.get(b'disable_profile') |
|
384 | disableprofile = opts.get(b'disable_profile') | |
385 | importrules = opts.get(b'import_rules') |
|
385 | importrules = opts.get(b'import_rules') | |
386 | clearrules = opts.get(b'clear_rules') |
|
386 | clearrules = opts.get(b'clear_rules') | |
387 | delete = opts.get(b'delete') |
|
387 | delete = opts.get(b'delete') | |
388 | refresh = opts.get(b'refresh') |
|
388 | refresh = opts.get(b'refresh') | |
389 | reset = opts.get(b'reset') |
|
389 | reset = opts.get(b'reset') | |
390 | action = cmdutil.check_at_most_one_arg( |
|
390 | action = cmdutil.check_at_most_one_arg( | |
391 | opts, b'import_rules', b'clear_rules', b'refresh' |
|
391 | opts, b'import_rules', b'clear_rules', b'refresh' | |
392 | ) |
|
392 | ) | |
393 | updateconfig = bool( |
|
393 | updateconfig = bool( | |
394 | include or exclude or delete or reset or enableprofile or disableprofile |
|
394 | include or exclude or delete or reset or enableprofile or disableprofile | |
395 | ) |
|
395 | ) | |
396 | count = sum([updateconfig, bool(action)]) |
|
396 | count = sum([updateconfig, bool(action)]) | |
397 | if count > 1: |
|
397 | if count > 1: | |
398 | raise error.Abort(_(b"too many flags specified")) |
|
398 | raise error.Abort(_(b"too many flags specified")) | |
399 |
|
399 | |||
|
400 | # enable sparse on repo even if the requirements is missing. | |||
|
401 | repo._has_sparse = True | |||
|
402 | ||||
400 | if count == 0: |
|
403 | if count == 0: | |
401 | if repo.vfs.exists(b'sparse'): |
|
404 | if repo.vfs.exists(b'sparse'): | |
402 | ui.status(repo.vfs.read(b"sparse") + b"\n") |
|
405 | ui.status(repo.vfs.read(b"sparse") + b"\n") | |
403 | temporaryincludes = sparse.readtemporaryincludes(repo) |
|
406 | temporaryincludes = sparse.readtemporaryincludes(repo) | |
404 | if temporaryincludes: |
|
407 | if temporaryincludes: | |
405 | ui.status( |
|
408 | ui.status( | |
406 | _(b"Temporarily Included Files (for merge/rebase):\n") |
|
409 | _(b"Temporarily Included Files (for merge/rebase):\n") | |
407 | ) |
|
410 | ) | |
408 | ui.status((b"\n".join(temporaryincludes) + b"\n")) |
|
411 | ui.status((b"\n".join(temporaryincludes) + b"\n")) | |
409 | return |
|
412 | return | |
410 | else: |
|
413 | else: | |
411 | raise error.Abort( |
|
414 | raise error.Abort( | |
412 | _( |
|
415 | _( | |
413 | b'the debugsparse command is only supported on' |
|
416 | b'the debugsparse command is only supported on' | |
414 | b' sparse repositories' |
|
417 | b' sparse repositories' | |
415 | ) |
|
418 | ) | |
416 | ) |
|
419 | ) | |
417 |
|
420 | |||
418 | if updateconfig: |
|
421 | if updateconfig: | |
419 | sparse.updateconfig( |
|
422 | sparse.updateconfig( | |
420 | repo, |
|
423 | repo, | |
421 | opts, |
|
424 | opts, | |
422 | include=include, |
|
425 | include=include, | |
423 | exclude=exclude, |
|
426 | exclude=exclude, | |
424 | reset=reset, |
|
427 | reset=reset, | |
425 | delete=delete, |
|
428 | delete=delete, | |
426 | enableprofile=enableprofile, |
|
429 | enableprofile=enableprofile, | |
427 | disableprofile=disableprofile, |
|
430 | disableprofile=disableprofile, | |
428 | force=force, |
|
431 | force=force, | |
429 | ) |
|
432 | ) | |
430 |
|
433 | |||
431 | if importrules: |
|
434 | if importrules: | |
432 | sparse.importfromfiles(repo, opts, importrules, force=force) |
|
435 | sparse.importfromfiles(repo, opts, importrules, force=force) | |
433 |
|
436 | |||
434 | if clearrules: |
|
437 | if clearrules: | |
435 | sparse.clearrules(repo, force=force) |
|
438 | sparse.clearrules(repo, force=force) | |
436 |
|
439 | |||
437 | if refresh: |
|
440 | if refresh: | |
438 | try: |
|
441 | try: | |
439 | wlock = repo.wlock() |
|
442 | wlock = repo.wlock() | |
440 | fcounts = map( |
|
443 | fcounts = map( | |
441 | len, |
|
444 | len, | |
442 | sparse.refreshwdir( |
|
445 | sparse.refreshwdir( | |
443 | repo, repo.status(), sparse.matcher(repo), force=force |
|
446 | repo, repo.status(), sparse.matcher(repo), force=force | |
444 | ), |
|
447 | ), | |
445 | ) |
|
448 | ) | |
446 | sparse.printchanges( |
|
449 | sparse.printchanges( | |
447 | ui, |
|
450 | ui, | |
448 | opts, |
|
451 | opts, | |
449 | added=fcounts[0], |
|
452 | added=fcounts[0], | |
450 | dropped=fcounts[1], |
|
453 | dropped=fcounts[1], | |
451 | conflicting=fcounts[2], |
|
454 | conflicting=fcounts[2], | |
452 | ) |
|
455 | ) | |
453 | finally: |
|
456 | finally: | |
454 | wlock.release() |
|
457 | wlock.release() | |
|
458 | ||||
|
459 | del repo._has_sparse |
@@ -1,846 +1,856 b'' | |||||
1 | # sparse.py - functionality for sparse checkouts |
|
1 | # sparse.py - functionality for sparse checkouts | |
2 | # |
|
2 | # | |
3 | # Copyright 2014 Facebook, Inc. |
|
3 | # Copyright 2014 Facebook, Inc. | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 |
|
8 | |||
9 | import os |
|
9 | import os | |
10 |
|
10 | |||
11 | from .i18n import _ |
|
11 | from .i18n import _ | |
12 | from .node import hex |
|
12 | from .node import hex | |
13 | from . import ( |
|
13 | from . import ( | |
14 | error, |
|
14 | error, | |
15 | match as matchmod, |
|
15 | match as matchmod, | |
16 | merge as mergemod, |
|
16 | merge as mergemod, | |
17 | mergestate as mergestatemod, |
|
17 | mergestate as mergestatemod, | |
18 | pathutil, |
|
18 | pathutil, | |
19 | pycompat, |
|
19 | pycompat, | |
20 | requirements, |
|
20 | requirements, | |
21 | scmutil, |
|
21 | scmutil, | |
22 | util, |
|
22 | util, | |
23 | ) |
|
23 | ) | |
24 | from .utils import hashutil |
|
24 | from .utils import hashutil | |
25 |
|
25 | |||
26 |
|
26 | |||
27 | # Whether sparse features are enabled. This variable is intended to be |
|
27 | # Whether sparse features are enabled. This variable is intended to be | |
28 | # temporary to facilitate porting sparse to core. It should eventually be |
|
28 | # temporary to facilitate porting sparse to core. It should eventually be | |
29 | # a per-repo option, possibly a repo requirement. |
|
29 | # a per-repo option, possibly a repo requirement. | |
30 | enabled = False |
|
30 | enabled = False | |
31 |
|
31 | |||
32 |
|
32 | |||
|
33 | def use_sparse(repo): | |||
|
34 | if getattr(repo, "_has_sparse", False): | |||
|
35 | # When enabling sparse the first time we need it to be enabled before | |||
|
36 | # actually enabling it. This hack could be avoided if the code was | |||
|
37 | # improved further, however this is an improvement over the previously | |||
|
38 | # existing global variable. | |||
|
39 | return True | |||
|
40 | return requirements.SPARSE_REQUIREMENT in repo.requirements | |||
|
41 | ||||
|
42 | ||||
33 | def parseconfig(ui, raw, action): |
|
43 | def parseconfig(ui, raw, action): | |
34 | """Parse sparse config file content. |
|
44 | """Parse sparse config file content. | |
35 |
|
45 | |||
36 | action is the command which is trigerring this read, can be narrow, sparse |
|
46 | action is the command which is trigerring this read, can be narrow, sparse | |
37 |
|
47 | |||
38 | Returns a tuple of includes, excludes, and profiles. |
|
48 | Returns a tuple of includes, excludes, and profiles. | |
39 | """ |
|
49 | """ | |
40 | with util.timedcm( |
|
50 | with util.timedcm( | |
41 | 'sparse.parseconfig(ui, %d bytes, action=%s)', len(raw), action |
|
51 | 'sparse.parseconfig(ui, %d bytes, action=%s)', len(raw), action | |
42 | ): |
|
52 | ): | |
43 | includes = set() |
|
53 | includes = set() | |
44 | excludes = set() |
|
54 | excludes = set() | |
45 | profiles = set() |
|
55 | profiles = set() | |
46 | current = None |
|
56 | current = None | |
47 | havesection = False |
|
57 | havesection = False | |
48 |
|
58 | |||
49 | for line in raw.split(b'\n'): |
|
59 | for line in raw.split(b'\n'): | |
50 | line = line.strip() |
|
60 | line = line.strip() | |
51 | if not line or line.startswith(b'#'): |
|
61 | if not line or line.startswith(b'#'): | |
52 | # empty or comment line, skip |
|
62 | # empty or comment line, skip | |
53 | continue |
|
63 | continue | |
54 | elif line.startswith(b'%include '): |
|
64 | elif line.startswith(b'%include '): | |
55 | line = line[9:].strip() |
|
65 | line = line[9:].strip() | |
56 | if line: |
|
66 | if line: | |
57 | profiles.add(line) |
|
67 | profiles.add(line) | |
58 | elif line == b'[include]': |
|
68 | elif line == b'[include]': | |
59 | if havesection and current != includes: |
|
69 | if havesection and current != includes: | |
60 | # TODO pass filename into this API so we can report it. |
|
70 | # TODO pass filename into this API so we can report it. | |
61 | raise error.Abort( |
|
71 | raise error.Abort( | |
62 | _( |
|
72 | _( | |
63 | b'%(action)s config cannot have includes ' |
|
73 | b'%(action)s config cannot have includes ' | |
64 | b'after excludes' |
|
74 | b'after excludes' | |
65 | ) |
|
75 | ) | |
66 | % {b'action': action} |
|
76 | % {b'action': action} | |
67 | ) |
|
77 | ) | |
68 | havesection = True |
|
78 | havesection = True | |
69 | current = includes |
|
79 | current = includes | |
70 | continue |
|
80 | continue | |
71 | elif line == b'[exclude]': |
|
81 | elif line == b'[exclude]': | |
72 | havesection = True |
|
82 | havesection = True | |
73 | current = excludes |
|
83 | current = excludes | |
74 | elif line: |
|
84 | elif line: | |
75 | if current is None: |
|
85 | if current is None: | |
76 | raise error.Abort( |
|
86 | raise error.Abort( | |
77 | _( |
|
87 | _( | |
78 | b'%(action)s config entry outside of ' |
|
88 | b'%(action)s config entry outside of ' | |
79 | b'section: %(line)s' |
|
89 | b'section: %(line)s' | |
80 | ) |
|
90 | ) | |
81 | % {b'action': action, b'line': line}, |
|
91 | % {b'action': action, b'line': line}, | |
82 | hint=_( |
|
92 | hint=_( | |
83 | b'add an [include] or [exclude] line ' |
|
93 | b'add an [include] or [exclude] line ' | |
84 | b'to declare the entry type' |
|
94 | b'to declare the entry type' | |
85 | ), |
|
95 | ), | |
86 | ) |
|
96 | ) | |
87 |
|
97 | |||
88 | if line.strip().startswith(b'/'): |
|
98 | if line.strip().startswith(b'/'): | |
89 | ui.warn( |
|
99 | ui.warn( | |
90 | _( |
|
100 | _( | |
91 | b'warning: %(action)s profile cannot use' |
|
101 | b'warning: %(action)s profile cannot use' | |
92 | b' paths starting with /, ignoring %(line)s\n' |
|
102 | b' paths starting with /, ignoring %(line)s\n' | |
93 | ) |
|
103 | ) | |
94 | % {b'action': action, b'line': line} |
|
104 | % {b'action': action, b'line': line} | |
95 | ) |
|
105 | ) | |
96 | continue |
|
106 | continue | |
97 | current.add(line) |
|
107 | current.add(line) | |
98 |
|
108 | |||
99 | return includes, excludes, profiles |
|
109 | return includes, excludes, profiles | |
100 |
|
110 | |||
101 |
|
111 | |||
102 | # Exists as separate function to facilitate monkeypatching. |
|
112 | # Exists as separate function to facilitate monkeypatching. | |
103 | def readprofile(repo, profile, changeid): |
|
113 | def readprofile(repo, profile, changeid): | |
104 | """Resolve the raw content of a sparse profile file.""" |
|
114 | """Resolve the raw content of a sparse profile file.""" | |
105 | # TODO add some kind of cache here because this incurs a manifest |
|
115 | # TODO add some kind of cache here because this incurs a manifest | |
106 | # resolve and can be slow. |
|
116 | # resolve and can be slow. | |
107 | return repo.filectx(profile, changeid=changeid).data() |
|
117 | return repo.filectx(profile, changeid=changeid).data() | |
108 |
|
118 | |||
109 |
|
119 | |||
110 | def patternsforrev(repo, rev): |
|
120 | def patternsforrev(repo, rev): | |
111 | """Obtain sparse checkout patterns for the given rev. |
|
121 | """Obtain sparse checkout patterns for the given rev. | |
112 |
|
122 | |||
113 | Returns a tuple of iterables representing includes, excludes, and |
|
123 | Returns a tuple of iterables representing includes, excludes, and | |
114 | patterns. |
|
124 | patterns. | |
115 | """ |
|
125 | """ | |
116 | # Feature isn't enabled. No-op. |
|
126 | # Feature isn't enabled. No-op. | |
117 |
if not |
|
127 | if not use_sparse(repo): | |
118 | return set(), set(), set() |
|
128 | return set(), set(), set() | |
119 |
|
129 | |||
120 | raw = repo.vfs.tryread(b'sparse') |
|
130 | raw = repo.vfs.tryread(b'sparse') | |
121 | if not raw: |
|
131 | if not raw: | |
122 | return set(), set(), set() |
|
132 | return set(), set(), set() | |
123 |
|
133 | |||
124 | if rev is None: |
|
134 | if rev is None: | |
125 | raise error.Abort( |
|
135 | raise error.Abort( | |
126 | _(b'cannot parse sparse patterns from working directory') |
|
136 | _(b'cannot parse sparse patterns from working directory') | |
127 | ) |
|
137 | ) | |
128 |
|
138 | |||
129 | includes, excludes, profiles = parseconfig(repo.ui, raw, b'sparse') |
|
139 | includes, excludes, profiles = parseconfig(repo.ui, raw, b'sparse') | |
130 | ctx = repo[rev] |
|
140 | ctx = repo[rev] | |
131 |
|
141 | |||
132 | if profiles: |
|
142 | if profiles: | |
133 | visited = set() |
|
143 | visited = set() | |
134 | while profiles: |
|
144 | while profiles: | |
135 | profile = profiles.pop() |
|
145 | profile = profiles.pop() | |
136 | if profile in visited: |
|
146 | if profile in visited: | |
137 | continue |
|
147 | continue | |
138 |
|
148 | |||
139 | visited.add(profile) |
|
149 | visited.add(profile) | |
140 |
|
150 | |||
141 | try: |
|
151 | try: | |
142 | raw = readprofile(repo, profile, rev) |
|
152 | raw = readprofile(repo, profile, rev) | |
143 | except error.ManifestLookupError: |
|
153 | except error.ManifestLookupError: | |
144 | msg = ( |
|
154 | msg = ( | |
145 | b"warning: sparse profile '%s' not found " |
|
155 | b"warning: sparse profile '%s' not found " | |
146 | b"in rev %s - ignoring it\n" % (profile, ctx) |
|
156 | b"in rev %s - ignoring it\n" % (profile, ctx) | |
147 | ) |
|
157 | ) | |
148 | # experimental config: sparse.missingwarning |
|
158 | # experimental config: sparse.missingwarning | |
149 | if repo.ui.configbool(b'sparse', b'missingwarning'): |
|
159 | if repo.ui.configbool(b'sparse', b'missingwarning'): | |
150 | repo.ui.warn(msg) |
|
160 | repo.ui.warn(msg) | |
151 | else: |
|
161 | else: | |
152 | repo.ui.debug(msg) |
|
162 | repo.ui.debug(msg) | |
153 | continue |
|
163 | continue | |
154 |
|
164 | |||
155 | pincludes, pexcludes, subprofs = parseconfig( |
|
165 | pincludes, pexcludes, subprofs = parseconfig( | |
156 | repo.ui, raw, b'sparse' |
|
166 | repo.ui, raw, b'sparse' | |
157 | ) |
|
167 | ) | |
158 | includes.update(pincludes) |
|
168 | includes.update(pincludes) | |
159 | excludes.update(pexcludes) |
|
169 | excludes.update(pexcludes) | |
160 | profiles.update(subprofs) |
|
170 | profiles.update(subprofs) | |
161 |
|
171 | |||
162 | profiles = visited |
|
172 | profiles = visited | |
163 |
|
173 | |||
164 | if includes: |
|
174 | if includes: | |
165 | includes.add(b'.hg*') |
|
175 | includes.add(b'.hg*') | |
166 |
|
176 | |||
167 | return includes, excludes, profiles |
|
177 | return includes, excludes, profiles | |
168 |
|
178 | |||
169 |
|
179 | |||
170 | def activeconfig(repo): |
|
180 | def activeconfig(repo): | |
171 | """Determine the active sparse config rules. |
|
181 | """Determine the active sparse config rules. | |
172 |
|
182 | |||
173 | Rules are constructed by reading the current sparse config and bringing in |
|
183 | Rules are constructed by reading the current sparse config and bringing in | |
174 | referenced profiles from parents of the working directory. |
|
184 | referenced profiles from parents of the working directory. | |
175 | """ |
|
185 | """ | |
176 | revs = [ |
|
186 | revs = [ | |
177 | repo.changelog.rev(node) |
|
187 | repo.changelog.rev(node) | |
178 | for node in repo.dirstate.parents() |
|
188 | for node in repo.dirstate.parents() | |
179 | if node != repo.nullid |
|
189 | if node != repo.nullid | |
180 | ] |
|
190 | ] | |
181 |
|
191 | |||
182 | allincludes = set() |
|
192 | allincludes = set() | |
183 | allexcludes = set() |
|
193 | allexcludes = set() | |
184 | allprofiles = set() |
|
194 | allprofiles = set() | |
185 |
|
195 | |||
186 | for rev in revs: |
|
196 | for rev in revs: | |
187 | includes, excludes, profiles = patternsforrev(repo, rev) |
|
197 | includes, excludes, profiles = patternsforrev(repo, rev) | |
188 | allincludes |= includes |
|
198 | allincludes |= includes | |
189 | allexcludes |= excludes |
|
199 | allexcludes |= excludes | |
190 | allprofiles |= profiles |
|
200 | allprofiles |= profiles | |
191 |
|
201 | |||
192 | return allincludes, allexcludes, allprofiles |
|
202 | return allincludes, allexcludes, allprofiles | |
193 |
|
203 | |||
194 |
|
204 | |||
195 | def configsignature(repo, includetemp=True): |
|
205 | def configsignature(repo, includetemp=True): | |
196 | """Obtain the signature string for the current sparse configuration. |
|
206 | """Obtain the signature string for the current sparse configuration. | |
197 |
|
207 | |||
198 | This is used to construct a cache key for matchers. |
|
208 | This is used to construct a cache key for matchers. | |
199 | """ |
|
209 | """ | |
200 | cache = repo._sparsesignaturecache |
|
210 | cache = repo._sparsesignaturecache | |
201 |
|
211 | |||
202 | signature = cache.get(b'signature') |
|
212 | signature = cache.get(b'signature') | |
203 |
|
213 | |||
204 | if includetemp: |
|
214 | if includetemp: | |
205 | tempsignature = cache.get(b'tempsignature') |
|
215 | tempsignature = cache.get(b'tempsignature') | |
206 | else: |
|
216 | else: | |
207 | tempsignature = b'0' |
|
217 | tempsignature = b'0' | |
208 |
|
218 | |||
209 | if signature is None or (includetemp and tempsignature is None): |
|
219 | if signature is None or (includetemp and tempsignature is None): | |
210 | signature = hex(hashutil.sha1(repo.vfs.tryread(b'sparse')).digest()) |
|
220 | signature = hex(hashutil.sha1(repo.vfs.tryread(b'sparse')).digest()) | |
211 | cache[b'signature'] = signature |
|
221 | cache[b'signature'] = signature | |
212 |
|
222 | |||
213 | if includetemp: |
|
223 | if includetemp: | |
214 | raw = repo.vfs.tryread(b'tempsparse') |
|
224 | raw = repo.vfs.tryread(b'tempsparse') | |
215 | tempsignature = hex(hashutil.sha1(raw).digest()) |
|
225 | tempsignature = hex(hashutil.sha1(raw).digest()) | |
216 | cache[b'tempsignature'] = tempsignature |
|
226 | cache[b'tempsignature'] = tempsignature | |
217 |
|
227 | |||
218 | return b'%s %s' % (signature, tempsignature) |
|
228 | return b'%s %s' % (signature, tempsignature) | |
219 |
|
229 | |||
220 |
|
230 | |||
221 | def writeconfig(repo, includes, excludes, profiles): |
|
231 | def writeconfig(repo, includes, excludes, profiles): | |
222 | """Write the sparse config file given a sparse configuration.""" |
|
232 | """Write the sparse config file given a sparse configuration.""" | |
223 | with repo.vfs(b'sparse', b'wb') as fh: |
|
233 | with repo.vfs(b'sparse', b'wb') as fh: | |
224 | for p in sorted(profiles): |
|
234 | for p in sorted(profiles): | |
225 | fh.write(b'%%include %s\n' % p) |
|
235 | fh.write(b'%%include %s\n' % p) | |
226 |
|
236 | |||
227 | if includes: |
|
237 | if includes: | |
228 | fh.write(b'[include]\n') |
|
238 | fh.write(b'[include]\n') | |
229 | for i in sorted(includes): |
|
239 | for i in sorted(includes): | |
230 | fh.write(i) |
|
240 | fh.write(i) | |
231 | fh.write(b'\n') |
|
241 | fh.write(b'\n') | |
232 |
|
242 | |||
233 | if excludes: |
|
243 | if excludes: | |
234 | fh.write(b'[exclude]\n') |
|
244 | fh.write(b'[exclude]\n') | |
235 | for e in sorted(excludes): |
|
245 | for e in sorted(excludes): | |
236 | fh.write(e) |
|
246 | fh.write(e) | |
237 | fh.write(b'\n') |
|
247 | fh.write(b'\n') | |
238 |
|
248 | |||
239 | repo._sparsesignaturecache.clear() |
|
249 | repo._sparsesignaturecache.clear() | |
240 |
|
250 | |||
241 |
|
251 | |||
242 | def readtemporaryincludes(repo): |
|
252 | def readtemporaryincludes(repo): | |
243 | raw = repo.vfs.tryread(b'tempsparse') |
|
253 | raw = repo.vfs.tryread(b'tempsparse') | |
244 | if not raw: |
|
254 | if not raw: | |
245 | return set() |
|
255 | return set() | |
246 |
|
256 | |||
247 | return set(raw.split(b'\n')) |
|
257 | return set(raw.split(b'\n')) | |
248 |
|
258 | |||
249 |
|
259 | |||
250 | def writetemporaryincludes(repo, includes): |
|
260 | def writetemporaryincludes(repo, includes): | |
251 | repo.vfs.write(b'tempsparse', b'\n'.join(sorted(includes))) |
|
261 | repo.vfs.write(b'tempsparse', b'\n'.join(sorted(includes))) | |
252 | repo._sparsesignaturecache.clear() |
|
262 | repo._sparsesignaturecache.clear() | |
253 |
|
263 | |||
254 |
|
264 | |||
255 | def addtemporaryincludes(repo, additional): |
|
265 | def addtemporaryincludes(repo, additional): | |
256 | includes = readtemporaryincludes(repo) |
|
266 | includes = readtemporaryincludes(repo) | |
257 | for i in additional: |
|
267 | for i in additional: | |
258 | includes.add(i) |
|
268 | includes.add(i) | |
259 | writetemporaryincludes(repo, includes) |
|
269 | writetemporaryincludes(repo, includes) | |
260 |
|
270 | |||
261 |
|
271 | |||
262 | def prunetemporaryincludes(repo): |
|
272 | def prunetemporaryincludes(repo): | |
263 |
if not |
|
273 | if not use_sparse(repo) or not repo.vfs.exists(b'tempsparse'): | |
264 | return |
|
274 | return | |
265 |
|
275 | |||
266 | s = repo.status() |
|
276 | s = repo.status() | |
267 | if s.modified or s.added or s.removed or s.deleted: |
|
277 | if s.modified or s.added or s.removed or s.deleted: | |
268 | # Still have pending changes. Don't bother trying to prune. |
|
278 | # Still have pending changes. Don't bother trying to prune. | |
269 | return |
|
279 | return | |
270 |
|
280 | |||
271 | sparsematch = matcher(repo, includetemp=False) |
|
281 | sparsematch = matcher(repo, includetemp=False) | |
272 | dirstate = repo.dirstate |
|
282 | dirstate = repo.dirstate | |
273 | mresult = mergemod.mergeresult() |
|
283 | mresult = mergemod.mergeresult() | |
274 | dropped = [] |
|
284 | dropped = [] | |
275 | tempincludes = readtemporaryincludes(repo) |
|
285 | tempincludes = readtemporaryincludes(repo) | |
276 | for file in tempincludes: |
|
286 | for file in tempincludes: | |
277 | if file in dirstate and not sparsematch(file): |
|
287 | if file in dirstate and not sparsematch(file): | |
278 | message = _(b'dropping temporarily included sparse files') |
|
288 | message = _(b'dropping temporarily included sparse files') | |
279 | mresult.addfile(file, mergestatemod.ACTION_REMOVE, None, message) |
|
289 | mresult.addfile(file, mergestatemod.ACTION_REMOVE, None, message) | |
280 | dropped.append(file) |
|
290 | dropped.append(file) | |
281 |
|
291 | |||
282 | mergemod.applyupdates( |
|
292 | mergemod.applyupdates( | |
283 | repo, mresult, repo[None], repo[b'.'], False, wantfiledata=False |
|
293 | repo, mresult, repo[None], repo[b'.'], False, wantfiledata=False | |
284 | ) |
|
294 | ) | |
285 |
|
295 | |||
286 | # Fix dirstate |
|
296 | # Fix dirstate | |
287 | for file in dropped: |
|
297 | for file in dropped: | |
288 | dirstate.update_file(file, p1_tracked=False, wc_tracked=False) |
|
298 | dirstate.update_file(file, p1_tracked=False, wc_tracked=False) | |
289 |
|
299 | |||
290 | repo.vfs.unlink(b'tempsparse') |
|
300 | repo.vfs.unlink(b'tempsparse') | |
291 | repo._sparsesignaturecache.clear() |
|
301 | repo._sparsesignaturecache.clear() | |
292 | msg = _( |
|
302 | msg = _( | |
293 | b'cleaned up %d temporarily added file(s) from the ' |
|
303 | b'cleaned up %d temporarily added file(s) from the ' | |
294 | b'sparse checkout\n' |
|
304 | b'sparse checkout\n' | |
295 | ) |
|
305 | ) | |
296 | repo.ui.status(msg % len(tempincludes)) |
|
306 | repo.ui.status(msg % len(tempincludes)) | |
297 |
|
307 | |||
298 |
|
308 | |||
299 | def forceincludematcher(matcher, includes): |
|
309 | def forceincludematcher(matcher, includes): | |
300 | """Returns a matcher that returns true for any of the forced includes |
|
310 | """Returns a matcher that returns true for any of the forced includes | |
301 | before testing against the actual matcher.""" |
|
311 | before testing against the actual matcher.""" | |
302 | kindpats = [(b'path', include, b'') for include in includes] |
|
312 | kindpats = [(b'path', include, b'') for include in includes] | |
303 | includematcher = matchmod.includematcher(b'', kindpats) |
|
313 | includematcher = matchmod.includematcher(b'', kindpats) | |
304 | return matchmod.unionmatcher([includematcher, matcher]) |
|
314 | return matchmod.unionmatcher([includematcher, matcher]) | |
305 |
|
315 | |||
306 |
|
316 | |||
307 | def matcher(repo, revs=None, includetemp=True): |
|
317 | def matcher(repo, revs=None, includetemp=True): | |
308 | """Obtain a matcher for sparse working directories for the given revs. |
|
318 | """Obtain a matcher for sparse working directories for the given revs. | |
309 |
|
319 | |||
310 | If multiple revisions are specified, the matcher is the union of all |
|
320 | If multiple revisions are specified, the matcher is the union of all | |
311 | revs. |
|
321 | revs. | |
312 |
|
322 | |||
313 | ``includetemp`` indicates whether to use the temporary sparse profile. |
|
323 | ``includetemp`` indicates whether to use the temporary sparse profile. | |
314 | """ |
|
324 | """ | |
315 | # If sparse isn't enabled, sparse matcher matches everything. |
|
325 | # If sparse isn't enabled, sparse matcher matches everything. | |
316 |
if not |
|
326 | if not use_sparse(repo): | |
317 | return matchmod.always() |
|
327 | return matchmod.always() | |
318 |
|
328 | |||
319 | if not revs or revs == [None]: |
|
329 | if not revs or revs == [None]: | |
320 | revs = [ |
|
330 | revs = [ | |
321 | repo.changelog.rev(node) |
|
331 | repo.changelog.rev(node) | |
322 | for node in repo.dirstate.parents() |
|
332 | for node in repo.dirstate.parents() | |
323 | if node != repo.nullid |
|
333 | if node != repo.nullid | |
324 | ] |
|
334 | ] | |
325 |
|
335 | |||
326 | signature = configsignature(repo, includetemp=includetemp) |
|
336 | signature = configsignature(repo, includetemp=includetemp) | |
327 |
|
337 | |||
328 | key = b'%s %s' % (signature, b' '.join(map(pycompat.bytestr, revs))) |
|
338 | key = b'%s %s' % (signature, b' '.join(map(pycompat.bytestr, revs))) | |
329 |
|
339 | |||
330 | result = repo._sparsematchercache.get(key) |
|
340 | result = repo._sparsematchercache.get(key) | |
331 | if result: |
|
341 | if result: | |
332 | return result |
|
342 | return result | |
333 |
|
343 | |||
334 | matchers = [] |
|
344 | matchers = [] | |
335 | for rev in revs: |
|
345 | for rev in revs: | |
336 | try: |
|
346 | try: | |
337 | includes, excludes, profiles = patternsforrev(repo, rev) |
|
347 | includes, excludes, profiles = patternsforrev(repo, rev) | |
338 |
|
348 | |||
339 | if includes or excludes: |
|
349 | if includes or excludes: | |
340 | matcher = matchmod.match( |
|
350 | matcher = matchmod.match( | |
341 | repo.root, |
|
351 | repo.root, | |
342 | b'', |
|
352 | b'', | |
343 | [], |
|
353 | [], | |
344 | include=includes, |
|
354 | include=includes, | |
345 | exclude=excludes, |
|
355 | exclude=excludes, | |
346 | default=b'relpath', |
|
356 | default=b'relpath', | |
347 | ) |
|
357 | ) | |
348 | matchers.append(matcher) |
|
358 | matchers.append(matcher) | |
349 | except IOError: |
|
359 | except IOError: | |
350 | pass |
|
360 | pass | |
351 |
|
361 | |||
352 | if not matchers: |
|
362 | if not matchers: | |
353 | result = matchmod.always() |
|
363 | result = matchmod.always() | |
354 | elif len(matchers) == 1: |
|
364 | elif len(matchers) == 1: | |
355 | result = matchers[0] |
|
365 | result = matchers[0] | |
356 | else: |
|
366 | else: | |
357 | result = matchmod.unionmatcher(matchers) |
|
367 | result = matchmod.unionmatcher(matchers) | |
358 |
|
368 | |||
359 | if includetemp: |
|
369 | if includetemp: | |
360 | tempincludes = readtemporaryincludes(repo) |
|
370 | tempincludes = readtemporaryincludes(repo) | |
361 | result = forceincludematcher(result, tempincludes) |
|
371 | result = forceincludematcher(result, tempincludes) | |
362 |
|
372 | |||
363 | repo._sparsematchercache[key] = result |
|
373 | repo._sparsematchercache[key] = result | |
364 |
|
374 | |||
365 | return result |
|
375 | return result | |
366 |
|
376 | |||
367 |
|
377 | |||
368 | def filterupdatesactions(repo, wctx, mctx, branchmerge, mresult): |
|
378 | def filterupdatesactions(repo, wctx, mctx, branchmerge, mresult): | |
369 | """Filter updates to only lay out files that match the sparse rules.""" |
|
379 | """Filter updates to only lay out files that match the sparse rules.""" | |
370 |
if not |
|
380 | if not use_sparse(repo): | |
371 | return |
|
381 | return | |
372 |
|
382 | |||
373 | oldrevs = [pctx.rev() for pctx in wctx.parents()] |
|
383 | oldrevs = [pctx.rev() for pctx in wctx.parents()] | |
374 | oldsparsematch = matcher(repo, oldrevs) |
|
384 | oldsparsematch = matcher(repo, oldrevs) | |
375 |
|
385 | |||
376 | if oldsparsematch.always(): |
|
386 | if oldsparsematch.always(): | |
377 | return |
|
387 | return | |
378 |
|
388 | |||
379 | files = set() |
|
389 | files = set() | |
380 | prunedactions = {} |
|
390 | prunedactions = {} | |
381 |
|
391 | |||
382 | if branchmerge: |
|
392 | if branchmerge: | |
383 | # If we're merging, use the wctx filter, since we're merging into |
|
393 | # If we're merging, use the wctx filter, since we're merging into | |
384 | # the wctx. |
|
394 | # the wctx. | |
385 | sparsematch = matcher(repo, [wctx.p1().rev()]) |
|
395 | sparsematch = matcher(repo, [wctx.p1().rev()]) | |
386 | else: |
|
396 | else: | |
387 | # If we're updating, use the target context's filter, since we're |
|
397 | # If we're updating, use the target context's filter, since we're | |
388 | # moving to the target context. |
|
398 | # moving to the target context. | |
389 | sparsematch = matcher(repo, [mctx.rev()]) |
|
399 | sparsematch = matcher(repo, [mctx.rev()]) | |
390 |
|
400 | |||
391 | temporaryfiles = [] |
|
401 | temporaryfiles = [] | |
392 | for file, action in mresult.filemap(): |
|
402 | for file, action in mresult.filemap(): | |
393 | type, args, msg = action |
|
403 | type, args, msg = action | |
394 | files.add(file) |
|
404 | files.add(file) | |
395 | if sparsematch(file): |
|
405 | if sparsematch(file): | |
396 | prunedactions[file] = action |
|
406 | prunedactions[file] = action | |
397 | elif type == mergestatemod.ACTION_MERGE: |
|
407 | elif type == mergestatemod.ACTION_MERGE: | |
398 | temporaryfiles.append(file) |
|
408 | temporaryfiles.append(file) | |
399 | prunedactions[file] = action |
|
409 | prunedactions[file] = action | |
400 | elif branchmerge: |
|
410 | elif branchmerge: | |
401 | if not type.no_op: |
|
411 | if not type.no_op: | |
402 | temporaryfiles.append(file) |
|
412 | temporaryfiles.append(file) | |
403 | prunedactions[file] = action |
|
413 | prunedactions[file] = action | |
404 | elif type == mergestatemod.ACTION_FORGET: |
|
414 | elif type == mergestatemod.ACTION_FORGET: | |
405 | prunedactions[file] = action |
|
415 | prunedactions[file] = action | |
406 | elif file in wctx: |
|
416 | elif file in wctx: | |
407 | prunedactions[file] = (mergestatemod.ACTION_REMOVE, args, msg) |
|
417 | prunedactions[file] = (mergestatemod.ACTION_REMOVE, args, msg) | |
408 |
|
418 | |||
409 | # in case or rename on one side, it is possible that f1 might not |
|
419 | # in case or rename on one side, it is possible that f1 might not | |
410 | # be present in sparse checkout we should include it |
|
420 | # be present in sparse checkout we should include it | |
411 | # TODO: should we do the same for f2? |
|
421 | # TODO: should we do the same for f2? | |
412 | # exists as a separate check because file can be in sparse and hence |
|
422 | # exists as a separate check because file can be in sparse and hence | |
413 | # if we try to club this condition in above `elif type == ACTION_MERGE` |
|
423 | # if we try to club this condition in above `elif type == ACTION_MERGE` | |
414 | # it won't be triggered |
|
424 | # it won't be triggered | |
415 | if branchmerge and type == mergestatemod.ACTION_MERGE: |
|
425 | if branchmerge and type == mergestatemod.ACTION_MERGE: | |
416 | f1, f2, fa, move, anc = args |
|
426 | f1, f2, fa, move, anc = args | |
417 | if not sparsematch(f1): |
|
427 | if not sparsematch(f1): | |
418 | temporaryfiles.append(f1) |
|
428 | temporaryfiles.append(f1) | |
419 |
|
429 | |||
420 | if len(temporaryfiles) > 0: |
|
430 | if len(temporaryfiles) > 0: | |
421 | repo.ui.status( |
|
431 | repo.ui.status( | |
422 | _( |
|
432 | _( | |
423 | b'temporarily included %d file(s) in the sparse ' |
|
433 | b'temporarily included %d file(s) in the sparse ' | |
424 | b'checkout for merging\n' |
|
434 | b'checkout for merging\n' | |
425 | ) |
|
435 | ) | |
426 | % len(temporaryfiles) |
|
436 | % len(temporaryfiles) | |
427 | ) |
|
437 | ) | |
428 | addtemporaryincludes(repo, temporaryfiles) |
|
438 | addtemporaryincludes(repo, temporaryfiles) | |
429 |
|
439 | |||
430 | # Add the new files to the working copy so they can be merged, etc |
|
440 | # Add the new files to the working copy so they can be merged, etc | |
431 | tmresult = mergemod.mergeresult() |
|
441 | tmresult = mergemod.mergeresult() | |
432 | message = b'temporarily adding to sparse checkout' |
|
442 | message = b'temporarily adding to sparse checkout' | |
433 | wctxmanifest = repo[None].manifest() |
|
443 | wctxmanifest = repo[None].manifest() | |
434 | for file in temporaryfiles: |
|
444 | for file in temporaryfiles: | |
435 | if file in wctxmanifest: |
|
445 | if file in wctxmanifest: | |
436 | fctx = repo[None][file] |
|
446 | fctx = repo[None][file] | |
437 | tmresult.addfile( |
|
447 | tmresult.addfile( | |
438 | file, |
|
448 | file, | |
439 | mergestatemod.ACTION_GET, |
|
449 | mergestatemod.ACTION_GET, | |
440 | (fctx.flags(), False), |
|
450 | (fctx.flags(), False), | |
441 | message, |
|
451 | message, | |
442 | ) |
|
452 | ) | |
443 |
|
453 | |||
444 | with repo.dirstate.parentchange(): |
|
454 | with repo.dirstate.parentchange(): | |
445 | mergemod.applyupdates( |
|
455 | mergemod.applyupdates( | |
446 | repo, |
|
456 | repo, | |
447 | tmresult, |
|
457 | tmresult, | |
448 | repo[None], |
|
458 | repo[None], | |
449 | repo[b'.'], |
|
459 | repo[b'.'], | |
450 | False, |
|
460 | False, | |
451 | wantfiledata=False, |
|
461 | wantfiledata=False, | |
452 | ) |
|
462 | ) | |
453 |
|
463 | |||
454 | dirstate = repo.dirstate |
|
464 | dirstate = repo.dirstate | |
455 | for file, flags, msg in tmresult.getactions( |
|
465 | for file, flags, msg in tmresult.getactions( | |
456 | [mergestatemod.ACTION_GET] |
|
466 | [mergestatemod.ACTION_GET] | |
457 | ): |
|
467 | ): | |
458 | dirstate.update_file(file, p1_tracked=True, wc_tracked=True) |
|
468 | dirstate.update_file(file, p1_tracked=True, wc_tracked=True) | |
459 |
|
469 | |||
460 | profiles = activeconfig(repo)[2] |
|
470 | profiles = activeconfig(repo)[2] | |
461 | changedprofiles = profiles & files |
|
471 | changedprofiles = profiles & files | |
462 | # If an active profile changed during the update, refresh the checkout. |
|
472 | # If an active profile changed during the update, refresh the checkout. | |
463 | # Don't do this during a branch merge, since all incoming changes should |
|
473 | # Don't do this during a branch merge, since all incoming changes should | |
464 | # have been handled by the temporary includes above. |
|
474 | # have been handled by the temporary includes above. | |
465 | if changedprofiles and not branchmerge: |
|
475 | if changedprofiles and not branchmerge: | |
466 | mf = mctx.manifest() |
|
476 | mf = mctx.manifest() | |
467 | for file in mf: |
|
477 | for file in mf: | |
468 | old = oldsparsematch(file) |
|
478 | old = oldsparsematch(file) | |
469 | new = sparsematch(file) |
|
479 | new = sparsematch(file) | |
470 | if not old and new: |
|
480 | if not old and new: | |
471 | flags = mf.flags(file) |
|
481 | flags = mf.flags(file) | |
472 | prunedactions[file] = ( |
|
482 | prunedactions[file] = ( | |
473 | mergestatemod.ACTION_GET, |
|
483 | mergestatemod.ACTION_GET, | |
474 | (flags, False), |
|
484 | (flags, False), | |
475 | b'', |
|
485 | b'', | |
476 | ) |
|
486 | ) | |
477 | elif old and not new: |
|
487 | elif old and not new: | |
478 | prunedactions[file] = (mergestatemod.ACTION_REMOVE, [], b'') |
|
488 | prunedactions[file] = (mergestatemod.ACTION_REMOVE, [], b'') | |
479 |
|
489 | |||
480 | mresult.setactions(prunedactions) |
|
490 | mresult.setactions(prunedactions) | |
481 |
|
491 | |||
482 |
|
492 | |||
def refreshwdir(repo, origstatus, origsparsematch, force=False):
    """Refreshes working directory by taking sparse config into account.

    The old status and sparse matcher is compared against the current sparse
    matcher.

    Will abort if a file with pending changes is being excluded or included
    unless ``force`` is True.

    Returns a 3-tuple of lists: (added, dropped, lookup) — files brought
    into the checkout, files removed from it, and files that already
    existed on disk and are now tracked in a possibly-dirty state.
    """
    # Verify there are no pending changes
    pending = set()
    pending.update(origstatus.modified)
    pending.update(origstatus.added)
    pending.update(origstatus.removed)
    sparsematch = matcher(repo)
    abort = False

    # A dirty file that falls outside the new sparse profile would lose
    # its modifications on removal, so refuse unless forced.
    for f in pending:
        if not sparsematch(f):
            repo.ui.warn(_(b"pending changes to '%s'\n") % f)
            abort = not force

    if abort:
        raise error.Abort(
            _(b'could not update sparseness due to pending changes')
        )

    # Calculate merge result
    dirstate = repo.dirstate
    ctx = repo[b'.']
    added = []
    lookup = []
    dropped = []
    mf = ctx.manifest()
    files = set(mf)
    mresult = mergemod.mergeresult()

    for file in files:
        old = origsparsematch(file)
        new = sparsematch(file)
        # Add files that are newly included, or that don't exist in
        # the dirstate yet.
        if (new and not old) or (old and new and not file in dirstate):
            fl = mf.flags(file)
            if repo.wvfs.exists(file):
                # File already on disk: only sync its exec flag and mark
                # it for a status lookup rather than overwriting content.
                mresult.addfile(file, mergestatemod.ACTION_EXEC, (fl,), b'')
                lookup.append(file)
            else:
                mresult.addfile(
                    file, mergestatemod.ACTION_GET, (fl, False), b''
                )
                added.append(file)
        # Drop files that are newly excluded, or that still exist in
        # the dirstate.
        elif (old and not new) or (not old and not new and file in dirstate):
            dropped.append(file)
            if file not in pending:
                mresult.addfile(file, mergestatemod.ACTION_REMOVE, [], b'')

    # Verify there are no pending changes in newly included files
    abort = False
    for file in lookup:
        repo.ui.warn(_(b"pending changes to '%s'\n") % file)
        abort = not force
    if abort:
        raise error.Abort(
            _(
                b'cannot change sparseness due to pending '
                b'changes (delete the files or use '
                b'--force to bring them back dirty)'
            )
        )

    # Check for files that were only in the dirstate.
    for file, state in dirstate.items():
        if not file in files:
            old = origsparsematch(file)
            new = sparsematch(file)
            if old and not new:
                dropped.append(file)

    # Apply the computed GET/REMOVE/EXEC actions to the working directory.
    mergemod.applyupdates(
        repo, mresult, repo[None], repo[b'.'], False, wantfiledata=False
    )

    # Fix dirstate
    for file in added:
        dirstate.update_file(file, p1_tracked=True, wc_tracked=True)

    for file in dropped:
        dirstate.update_file(file, p1_tracked=False, wc_tracked=False)

    for file in lookup:
        # File exists on disk, and we're bringing it back in an unknown state.
        dirstate.update_file(
            file, p1_tracked=True, wc_tracked=True, possibly_dirty=True
        )

    return added, dropped, lookup
582 |
|
592 | |||
583 |
|
593 | |||
def aftercommit(repo, node):
    """Perform actions after a working directory commit.

    Called unconditionally, even when the sparse extension is not
    enabled; in that case the profile set below is empty and the
    refresh is skipped.
    """
    committed = repo[node]

    # patternsforrev() returns (includes, excludes, profiles); only the
    # active profiles matter here.
    profiles = patternsforrev(repo, committed.rev())[2]

    # profiles will only have data if sparse is enabled.
    if profiles & set(committed.files()):
        # The commit touched an active profile, so re-sync the checkout
        # against the updated sparse rules.
        refreshwdir(repo, repo.status(), matcher(repo), force=True)

    prunetemporaryincludes(repo)
599 |
|
609 | |||
600 |
|
610 | |||
def _updateconfigandrefreshwdir(
    repo, includes, excludes, profiles, force=False, removing=False
):
    """Update the sparse config and working directory state.

    Writes the new includes/excludes/profiles to the ``sparse`` config
    file, toggles the repository requirement accordingly, and refreshes
    the working directory. On any failure, both the requirements and the
    sparse config are rolled back to their previous values.

    ``removing`` indicates sparse is being disabled entirely, which drops
    the sparse requirement instead of adding it.

    Returns whatever :func:`refreshwdir` returns: (added, dropped, lookup).
    """
    with repo.lock():
        raw = repo.vfs.tryread(b'sparse')
        # Capture the old state so it can be restored on error below.
        oldincludes, oldexcludes, oldprofiles = parseconfig(
            repo.ui, raw, b'sparse'
        )

        oldstatus = repo.status()
        oldmatch = matcher(repo)
        oldrequires = set(repo.requirements)

        # TODO remove this try..except once the matcher integrates better
        # with dirstate. We currently have to write the updated config
        # because that will invalidate the matcher cache and force a
        # re-read. We ideally want to update the cached matcher on the
        # repo instance then flush the new config to disk once wdir is
        # updated. But this requires massive rework to matcher() and its
        # consumers.

        if requirements.SPARSE_REQUIREMENT in oldrequires and removing:
            repo.requirements.discard(requirements.SPARSE_REQUIREMENT)
            scmutil.writereporequirements(repo)
        elif requirements.SPARSE_REQUIREMENT not in oldrequires:
            repo.requirements.add(requirements.SPARSE_REQUIREMENT)
            scmutil.writereporequirements(repo)

        try:
            writeconfig(repo, includes, excludes, profiles)
            return refreshwdir(repo, oldstatus, oldmatch, force=force)
        except Exception:
            # Roll back requirements first, then the sparse config, so
            # the repo is left exactly as it was before this call.
            if repo.requirements != oldrequires:
                repo.requirements.clear()
                repo.requirements |= oldrequires
                scmutil.writereporequirements(repo)
            writeconfig(repo, oldincludes, oldexcludes, oldprofiles)
            raise
640 |
|
650 | |||
641 |
|
651 | |||
def clearrules(repo, force=False):
    """Clears include/exclude rules from the sparse config.

    The remaining sparse config only has profiles, if defined. The working
    directory is refreshed, as needed.
    """
    with repo.wlock(), repo.dirstate.parentchange():
        config = repo.vfs.tryread(b'sparse')
        includes, excludes, profiles = parseconfig(repo.ui, config, b'sparse')

        # No explicit rules present: avoid a needless config rewrite and
        # working-directory refresh.
        if includes or excludes:
            _updateconfigandrefreshwdir(
                repo, set(), set(), profiles, force=force
            )
656 |
|
666 | |||
657 |
|
667 | |||
def importfromfiles(repo, opts, paths, force=False):
    """Import sparse config rules from files.

    The updated sparse config is written out and the working directory
    is refreshed, as needed.

    ``paths`` are filesystem paths to sparse config files whose rules are
    merged on top of the current configuration; rules that are already
    active are ignored.
    """
    with repo.wlock(), repo.dirstate.parentchange():
        # read current configuration
        raw = repo.vfs.tryread(b'sparse')
        includes, excludes, profiles = parseconfig(repo.ui, raw, b'sparse')
        aincludes, aexcludes, aprofiles = activeconfig(repo)

        # Import rules on top; only take in rules that are not yet
        # part of the active rules.
        changed = False
        for p in paths:
            with util.posixfile(util.expandpath(p), mode=b'rb') as fh:
                raw = fh.read()

            iincludes, iexcludes, iprofiles = parseconfig(
                repo.ui, raw, b'sparse'
            )
            # Detect growth by comparing total rule counts before/after;
            # the sets silently drop duplicates.
            oldsize = len(includes) + len(excludes) + len(profiles)
            includes.update(iincludes - aincludes)
            excludes.update(iexcludes - aexcludes)
            profiles.update(iprofiles - aprofiles)
            if len(includes) + len(excludes) + len(profiles) > oldsize:
                changed = True

        profilecount = includecount = excludecount = 0
        # (added, dropped, conflicting) file counts for printchanges().
        fcounts = (0, 0, 0)

        if changed:
            profilecount = len(profiles - aprofiles)
            includecount = len(includes - aincludes)
            excludecount = len(excludes - aexcludes)

            fcounts = map(
                len,
                _updateconfigandrefreshwdir(
                    repo, includes, excludes, profiles, force=force
                ),
            )

        printchanges(
            repo.ui, opts, profilecount, includecount, excludecount, *fcounts
        )
705 |
|
715 | |||
706 |
|
716 | |||
def updateconfig(
    repo,
    opts,
    include=(),
    exclude=(),
    reset=False,
    delete=(),
    enableprofile=(),
    disableprofile=(),
    force=False,
    usereporootpaths=False,
):
    """Perform a sparse config update.

    The new config is written out and a working directory refresh is performed.

    ``include``/``exclude`` add rules; ``delete`` removes rules from both
    sets; ``enableprofile``/``disableprofile`` adjust the profile set;
    ``reset`` discards the existing config first. Unless
    ``usereporootpaths`` is set, patterns are interpreted relative to the
    current working directory.
    """
    with repo.wlock(), repo.lock(), repo.dirstate.parentchange():
        raw = repo.vfs.tryread(b'sparse')
        oldinclude, oldexclude, oldprofiles = parseconfig(
            repo.ui, raw, b'sparse'
        )

        if reset:
            newinclude = set()
            newexclude = set()
            newprofiles = set()
        else:
            newinclude = set(oldinclude)
            newexclude = set(oldexclude)
            newprofiles = set(oldprofiles)

        def normalize_pats(pats):
            # Convert cwd-relative patterns to repo-root-relative ones,
            # preserving any explicit "kind:" prefix.
            if any(os.path.isabs(pat) for pat in pats):
                raise error.Abort(_(b'paths cannot be absolute'))

            if usereporootpaths:
                return pats

            # let's treat paths as relative to cwd
            root, cwd = repo.root, repo.getcwd()
            abspats = []
            for kindpat in pats:
                kind, pat = matchmod._patsplit(kindpat, None)
                if kind in matchmod.cwdrelativepatternkinds or kind is None:
                    ap = (kind + b':' if kind else b'') + pathutil.canonpath(
                        root, cwd, pat
                    )
                    abspats.append(ap)
                else:
                    abspats.append(kindpat)
            return abspats

        include = normalize_pats(include)
        exclude = normalize_pats(exclude)
        delete = normalize_pats(delete)
        disableprofile = normalize_pats(disableprofile)
        enableprofile = normalize_pats(enableprofile)

        # Deletions are applied before additions, so a pattern that is
        # both deleted and (re-)included ends up included.
        newinclude.difference_update(delete)
        newexclude.difference_update(delete)
        newprofiles.difference_update(disableprofile)
        newinclude.update(include)
        newprofiles.update(enableprofile)
        newexclude.update(exclude)

        # Net change counts (additions minus removals) for reporting.
        profilecount = len(newprofiles - oldprofiles) - len(
            oldprofiles - newprofiles
        )
        includecount = len(newinclude - oldinclude) - len(
            oldinclude - newinclude
        )
        excludecount = len(newexclude - oldexclude) - len(
            oldexclude - newexclude
        )

        fcounts = map(
            len,
            _updateconfigandrefreshwdir(
                repo,
                newinclude,
                newexclude,
                newprofiles,
                force=force,
                removing=reset,
            ),
        )

        printchanges(
            repo.ui, opts, profilecount, includecount, excludecount, *fcounts
        )
797 |
|
807 | |||
798 |
|
808 | |||
def printchanges(
    ui,
    opts,
    profilecount=0,
    includecount=0,
    excludecount=0,
    added=0,
    dropped=0,
    conflicting=0,
):
    """Print output summarizing sparse config changes."""
    with ui.formatter(b'sparse', opts) as fm:
        fm.startitem()

        # Rule-change counters are always reported (verbose mode only).
        rulefields = [
            (b'profiles_added', _(b'Profiles changed: %d\n'), profilecount),
            (
                b'include_rules_added',
                _(b'Include rules changed: %d\n'),
                includecount,
            ),
            (
                b'exclude_rules_added',
                _(b'Exclude rules changed: %d\n'),
                excludecount,
            ),
        ]
        for field, label, value in rulefields:
            fm.condwrite(ui.verbose, field, label, value)

        # In 'plain' verbose mode, mergemod.applyupdates already outputs what
        # files are added or removed outside of the templating formatter
        # framework. No point in repeating ourselves in that case.
        if not fm.isplain():
            filefields = [
                (b'files_added', _(b'Files added: %d\n'), added),
                (b'files_dropped', _(b'Files dropped: %d\n'), dropped),
                (
                    b'files_conflicting',
                    _(b'Files conflicting: %d\n'),
                    conflicting,
                ),
            ]
            for field, label, value in filefields:
                fm.condwrite(ui.verbose, field, label, value)
General Comments 0
You need to be logged in to leave comments.
Login now