@@ -1,669 +1,671 @@
1 | # sparse.py - functionality for sparse checkouts
2 | #
3 | # Copyright 2014 Facebook, Inc.
4 | #
5 | # This software may be used and distributed according to the terms of the
6 | # GNU General Public License version 2 or any later version.
7 |
8 | from __future__ import absolute_import
9 |
10 | import collections
11 | import hashlib
12 | import os
13 |
14 | from .i18n import _
15 | from .node import nullid
16 | from . import (
17 |     error,
18 |     match as matchmod,
19 |     merge as mergemod,
20 |     pycompat,
21 |     util,
22 | )
23 |
24 | # Whether sparse features are enabled. This variable is intended to be
25 | # temporary to facilitate porting sparse to core. It should eventually be
26 | # a per-repo option, possibly a repo requirement.
27 | enabled = False
28 |
29 | def parseconfig(ui, raw):
30 |     """Parse sparse config file content.
31 |
32 |     Returns a tuple of includes, excludes, and profiles.
33 |     """
34 |     includes = set()
35 |     excludes = set()
36 |     current = includes
37 |     profiles = []
38 |     for line in raw.split('\n'):
39 |         line = line.strip()
40 |         if not line or line.startswith('#'):
41 |             # empty or comment line, skip
42 |             continue
43 |         elif line.startswith('%include '):
44 |             line = line[9:].strip()
45 |             if line:
46 |                 profiles.append(line)
47 |         elif line == '[include]':
48 |             if current != includes:
49 |                 # TODO pass filename into this API so we can report it.
50 |                 raise error.Abort(_('sparse config cannot have includes ' +
51 |                                     'after excludes'))
52 |             continue
53 |         elif line == '[exclude]':
54 |             current = excludes
55 |         elif line:
56 |             if line.strip().startswith('/'):
57 |                 ui.warn(_('warning: sparse profile cannot use' +
58 |                           ' paths starting with /, ignoring %s\n') % line)
59 |                 continue
60 |             current.add(line)
61 |
62 |     return includes, excludes, profiles
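
For reference (not part of the change under review): a minimal sketch of what parseconfig() returns for a typical config. The profile path and directory names are invented, and the stub ui only provides the warn() method parseconfig() can call.

    from mercurial import sparse

    class fakeui(object):
        def warn(self, msg):
            pass

    raw = ('%include tools/sparse-profiles/webdev\n'
           '# comments and blank lines are skipped\n'
           '\n'
           '[include]\n'
           'docs/\n'
           'web/\n'
           '[exclude]\n'
           'web/generated/\n')

    includes, excludes, profiles = sparse.parseconfig(fakeui(), raw)
    assert includes == set(['docs/', 'web/'])
    assert excludes == set(['web/generated/'])
    assert profiles == ['tools/sparse-profiles/webdev']
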
63 |
64 | # Exists as separate function to facilitate monkeypatching.
65 | def readprofile(repo, profile, changeid):
66 |     """Resolve the raw content of a sparse profile file."""
67 |     # TODO add some kind of cache here because this incurs a manifest
68 |     # resolve and can be slow.
69 |     return repo.filectx(profile, changeid=changeid).data()
70 |
71 | def patternsforrev(repo, rev):
72 |     """Obtain sparse checkout patterns for the given rev.
73 |
74 |     Returns a tuple of iterables representing includes, excludes, and
75 |     patterns.
76 |     """
77 |     # Feature isn't enabled. No-op.
78 |     if not enabled:
79 |         return set(), set(), []
80 |
81 |     raw = repo.vfs.tryread('sparse')
82 |     if not raw:
83 |         return set(), set(), []
84 |
85 |     if rev is None:
86 |         raise error.Abort(_('cannot parse sparse patterns from working '
87 |                             'directory'))
88 |
89 |     includes, excludes, profiles = parseconfig(repo.ui, raw)
90 |     ctx = repo[rev]
91 |
92 |     if profiles:
93 |         visited = set()
94 |         while profiles:
95 |             profile = profiles.pop()
96 |             if profile in visited:
97 |                 continue
98 |
99 |             visited.add(profile)
100 |
101 |             try:
102 |                 raw = readprofile(repo, profile, rev)
103 |             except error.ManifestLookupError:
104 |                 msg = (
105 |                     "warning: sparse profile '%s' not found "
106 |                     "in rev %s - ignoring it\n" % (profile, ctx))
107 |                 # experimental config: sparse.missingwarning
108 |                 if repo.ui.configbool(
109 |                         'sparse', 'missingwarning', True):
110 |                     repo.ui.warn(msg)
111 |                 else:
112 |                     repo.ui.debug(msg)
113 |                 continue
114 |
115 |             pincludes, pexcludes, subprofs = parseconfig(repo.ui, raw)
116 |             includes.update(pincludes)
117 |             excludes.update(pexcludes)
118 |             for subprofile in subprofs:
119 |                 profiles.append(subprofile)
120 |
121 |         profiles = visited
122 |
123 |     if includes:
124 |         includes.add('.hg*')
125 |
126 |     return includes, excludes, profiles
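
For reference (not part of the change under review): readprofile() above is kept as a separate function precisely so it can be monkeypatched, which also makes profile chaining easy to sketch without a real repository. The profile names and contents below are invented.

    from mercurial import sparse

    fakeprofiles = {
        'profiles/webdev': '%include profiles/base\n[include]\nweb/\n',
        'profiles/base': '[include]\ndocs/\n',
    }

    def fakereadprofile(repo, profile, changeid):
        return fakeprofiles[profile]

    # e.g. in a test: sparse.readprofile = fakereadprofile
    # With sparse.enabled set and a .hg/sparse naming profiles/webdev,
    # patternsforrev() would follow the %include to profiles/base and return
    # includes containing 'web/', 'docs/' and '.hg*', with
    # profiles == {'profiles/webdev', 'profiles/base'}.
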
127 |
128 | def activeconfig(repo):
129 |     """Determine the active sparse config rules.
130 |
131 |     Rules are constructed by reading the current sparse config and bringing in
132 |     referenced profiles from parents of the working directory.
133 |     """
134 |     revs = [repo.changelog.rev(node) for node in
135 |             repo.dirstate.parents() if node != nullid]
136 |
137 |     allincludes = set()
138 |     allexcludes = set()
139 |     allprofiles = set()
140 |
141 |     for rev in revs:
142 |         includes, excludes, profiles = patternsforrev(repo, rev)
143 |         allincludes |= includes
144 |         allexcludes |= excludes
145 |         allprofiles |= set(profiles)
146 |
147 |     return allincludes, allexcludes, allprofiles
148 |
149 | def configsignature(repo, includetemp=True):
150 |     """Obtain the signature string for the current sparse configuration.
151 |
152 |     This is used to construct a cache key for matchers.
153 |     """
154 |     cache = repo._sparsesignaturecache
155 |
156 |     signature = cache.get('signature')
157 |
158 |     if includetemp:
159 |         tempsignature = cache.get('tempsignature')
160 |     else:
161 |         tempsignature = '0'
162 |
163 |     if signature is None or (includetemp and tempsignature is None):
164 |         signature = hashlib.sha1(repo.vfs.tryread('sparse')).hexdigest()
165 |         cache['signature'] = signature
166 |
167 |         if includetemp:
168 |             raw = repo.vfs.tryread('tempsparse')
169 |             tempsignature = hashlib.sha1(raw).hexdigest()
170 |             cache['tempsignature'] = tempsignature
171 |
172 |     return '%s %s' % (signature, tempsignature)
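
For reference (not part of the change under review): the cache key scheme in a nutshell. configsignature() hashes the raw .hg/sparse and .hg/tempsparse contents, and matcher() below keys its cache on that signature plus the revs, so editing either file invalidates previously built matchers. The file contents here are invented.

    import hashlib

    sparseraw = '[include]\ndocs/\n'   # contents of .hg/sparse
    tempraw = ''                       # contents of .hg/tempsparse (absent)

    signature = '%s %s' % (hashlib.sha1(sparseraw).hexdigest(),
                           hashlib.sha1(tempraw).hexdigest())
    cachekey = '%s %s' % (signature, '42')   # '42' standing in for the revs
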
173 |
174 | def writeconfig(repo, includes, excludes, profiles):
175 |     """Write the sparse config file given a sparse configuration."""
176 |     with repo.vfs('sparse', 'wb') as fh:
177 |         for p in sorted(profiles):
178 |             fh.write('%%include %s\n' % p)
179 |
180 |         if includes:
181 |             fh.write('[include]\n')
182 |             for i in sorted(includes):
183 |                 fh.write(i)
184 |                 fh.write('\n')
185 |
186 |         if excludes:
187 |             fh.write('[exclude]\n')
188 |             for e in sorted(excludes):
189 |                 fh.write(e)
190 |                 fh.write('\n')
191 |
192 |     repo._sparsesignaturecache.clear()
193 |
194 | def readtemporaryincludes(repo):
195 |     raw = repo.vfs.tryread('tempsparse')
196 |     if not raw:
197 |         return set()
198 |
199 |     return set(raw.split('\n'))
200 |
201 | def writetemporaryincludes(repo, includes):
202 |     repo.vfs.write('tempsparse', '\n'.join(sorted(includes)))
203 |     repo._sparsesignaturecache.clear()
204 |
205 | def addtemporaryincludes(repo, additional):
206 |     includes = readtemporaryincludes(repo)
207 |     for i in additional:
208 |         includes.add(i)
209 |     writetemporaryincludes(repo, includes)
210 |
211 | def prunetemporaryincludes(repo):
212 |     if not enabled or not repo.vfs.exists('tempsparse'):
213 |         return
214 |
215 |     s = repo.status()
216 |     if s.modified or s.added or s.removed or s.deleted:
217 |         # Still have pending changes. Don't bother trying to prune.
218 |         return
219 |
220 |     sparsematch = matcher(repo, includetemp=False)
221 |     dirstate = repo.dirstate
222 |     actions = []
223 |     dropped = []
224 |     tempincludes = readtemporaryincludes(repo)
225 |     for file in tempincludes:
226 |         if file in dirstate and not sparsematch(file):
227 |             message = _('dropping temporarily included sparse files')
228 |             actions.append((file, None, message))
229 |             dropped.append(file)
230 |
231 |     typeactions = collections.defaultdict(list)
232 |     typeactions['r'] = actions
233 |     mergemod.applyupdates(repo, typeactions, repo[None], repo['.'], False)
234 |
235 |     # Fix dirstate
236 |     for file in dropped:
237 |         dirstate.drop(file)
238 |
239 |     repo.vfs.unlink('tempsparse')
240 |     repo._sparsesignaturecache.clear()
241 |     msg = _('cleaned up %d temporarily added file(s) from the '
242 |             'sparse checkout\n')
243 |     repo.ui.status(msg % len(tempincludes))
244 |
245 | def matcher(repo, revs=None, includetemp=True):
246 |     """Obtain a matcher for sparse working directories for the given revs.
247 |
248 |     If multiple revisions are specified, the matcher is the union of all
249 |     revs.
250 |
251 |     ``includetemp`` indicates whether to use the temporary sparse profile.
252 |     """
253 |     # If sparse isn't enabled, sparse matcher matches everything.
254 |     if not enabled:
255 |         return matchmod.always(repo.root, '')
256 |
257 |     if not revs or revs == [None]:
258 |         revs = [repo.changelog.rev(node)
259 |                 for node in repo.dirstate.parents() if node != nullid]
260 |
261 |     signature = configsignature(repo, includetemp=includetemp)
262 |
263 |     key = '%s %s' % (signature, ' '.join(map(pycompat.bytestr, revs)))
264 |
265 |     result = repo._sparsematchercache.get(key)
266 |     if result:
267 |         return result
268 |
269 |     matchers = []
270 |     for rev in revs:
271 |         try:
272 |             includes, excludes, profiles = patternsforrev(repo, rev)
273 |
274 |             if includes or excludes:
275 |                 # Explicitly include subdirectories of includes so
276 |                 # status will walk them down to the actual include.
277 |                 subdirs = set()
278 |                 for include in includes:
279 |                     # TODO consider using posix path functions here so Windows
280 |                     # \ directory separators don't come into play.
281 |                     dirname = os.path.dirname(include)
282 |                     # basename is used to avoid issues with absolute
283 |                     # paths (which on Windows can include the drive).
284 |                     while os.path.basename(dirname):
285 |                         subdirs.add(dirname)
286 |                         dirname = os.path.dirname(dirname)
287 |
288 |                 matcher = matchmod.match(repo.root, '', [],
289 |                                          include=includes, exclude=excludes,
290 |                                          default='relpath')
291 |                 if subdirs:
292 |                     matcher = matchmod.forceincludematcher(matcher, subdirs)
293 |                 matchers.append(matcher)
294 |         except IOError:
295 |             pass
296 |
297 |     if not matchers:
298 |         result = matchmod.always(repo.root, '')
299 |     elif len(matchers) == 1:
300 |         result = matchers[0]
301 |     else:
302 |         result = matchmod.unionmatcher(matchers)
303 |
304 |     if includetemp:
305 |         tempincludes = readtemporaryincludes(repo)
306 |         result = matchmod.forceincludematcher(result, tempincludes)
307 |
308 |     repo._sparsematchercache[key] = result
309 |
310 |     return result
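
For reference (not part of the change under review): how callers typically use the matcher. With sparse disabled or no config present it matches everything; otherwise only paths selected by the active rules. The helper name and path are invented.

    from mercurial import sparse

    def insparse(repo, path):
        # revs=None means the parents of the working directory are used
        sparsematch = sparse.matcher(repo)
        return sparsematch(path)

    # insparse(repo, 'web/app.py') is True only if the active includes cover
    # the path and no exclude rule removes it again.
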
311 |
312 | def filterupdatesactions(repo, wctx, mctx, branchmerge, actions):
313 |     """Filter updates to only lay out files that match the sparse rules."""
314 |     if not enabled:
315 |         return actions
316 |
317 |     oldrevs = [pctx.rev() for pctx in wctx.parents()]
318 |     oldsparsematch = matcher(repo, oldrevs)
319 |
320 |     if oldsparsematch.always():
321 |         return actions
322 |
323 |     files = set()
324 |     prunedactions = {}
325 |
326 |     if branchmerge:
327 |         # If we're merging, use the wctx filter, since we're merging into
328 |         # the wctx.
329 |         sparsematch = matcher(repo, [wctx.parents()[0].rev()])
330 |     else:
331 |         # If we're updating, use the target context's filter, since we're
332 |         # moving to the target context.
333 |         sparsematch = matcher(repo, [mctx.rev()])
334 |
335 |     temporaryfiles = []
336 |     for file, action in actions.iteritems():
337 |         type, args, msg = action
338 |         files.add(file)
339 |         if sparsematch(file):
340 |             prunedactions[file] = action
341 |         elif type == 'm':
342 |             temporaryfiles.append(file)
343 |             prunedactions[file] = action
344 |         elif branchmerge:
345 |             if type != 'k':
346 |                 temporaryfiles.append(file)
347 |                 prunedactions[file] = action
348 |         elif type == 'f':
349 |             prunedactions[file] = action
350 |         elif file in wctx:
351 |             prunedactions[file] = ('r', args, msg)
352 |
353 |     if len(temporaryfiles) > 0:
354 |         repo.ui.status(_('temporarily included %d file(s) in the sparse '
355 |                          'checkout for merging\n') % len(temporaryfiles))
356 |         addtemporaryincludes(repo, temporaryfiles)
357 |
358 |         # Add the new files to the working copy so they can be merged, etc
359 |         actions = []
360 |         message = 'temporarily adding to sparse checkout'
361 |         wctxmanifest = repo[None].manifest()
362 |         for file in temporaryfiles:
363 |             if file in wctxmanifest:
364 |                 fctx = repo[None][file]
365 |                 actions.append((file, (fctx.flags(), False), message))
366 |
367 |         typeactions = collections.defaultdict(list)
368 |         typeactions['g'] = actions
369 |         mergemod.applyupdates(repo, typeactions, repo[None], repo['.'],
370 |                               False)
371 |
372 |         dirstate = repo.dirstate
373 |         for file, flags, msg in actions:
374 |             dirstate.normal(file)
375 |
376 |     profiles = activeconfig(repo)[2]
377 |     changedprofiles = profiles & files
378 |     # If an active profile changed during the update, refresh the checkout.
379 |     # Don't do this during a branch merge, since all incoming changes should
380 |     # have been handled by the temporary includes above.
381 |     if changedprofiles and not branchmerge:
382 |         mf = mctx.manifest()
383 |         for file in mf:
384 |             old = oldsparsematch(file)
385 |             new = sparsematch(file)
386 |             if not old and new:
387 |                 flags = mf.flags(file)
388 |                 prunedactions[file] = ('g', (flags, False), '')
389 |             elif old and not new:
390 |                 prunedactions[file] = ('r', [], '')
391 |
392 |     return prunedactions
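
For reference (not part of the change under review): the shape of the actions mapping filtered above, using the same single-letter codes and argument tuples this module builds itself ('g' lays a file out on disk, 'r' removes it from the working copy). File names are invented.

    actions = {
        'web/app.py': ('g', ('', False), ''),   # get: args are (flags, backup)
        'docs/old.txt': ('r', [], ''),          # remove from the working copy
    }
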
393 |
394 | def refreshwdir(repo, origstatus, origsparsematch, force=False):
395 |     """Refreshes working directory by taking sparse config into account.
396 |
397 |     The old status and sparse matcher is compared against the current sparse
398 |     matcher.
399 |
400 |     Will abort if a file with pending changes is being excluded or included
401 |     unless ``force`` is True.
402 |     """
403 |     # Verify there are no pending changes
404 |     pending = set()
405 |     pending.update(origstatus.modified)
406 |     pending.update(origstatus.added)
407 |     pending.update(origstatus.removed)
408 |     sparsematch = matcher(repo)
409 |     abort = False
410 |
411 |     for f in pending:
412 |         if not sparsematch(f):
413 |             repo.ui.warn(_("pending changes to '%s'\n") % f)
414 |             abort = not force
415 |
416 |     if abort:
417 |         raise error.Abort(_('could not update sparseness due to pending '
418 |                             'changes'))
419 |
420 |     # Calculate actions
421 |     dirstate = repo.dirstate
422 |     ctx = repo['.']
423 |     added = []
424 |     lookup = []
425 |     dropped = []
426 |     mf = ctx.manifest()
427 |     files = set(mf)
428 |
429 |     actions = {}
430 |
431 |     for file in files:
432 |         old = origsparsematch(file)
433 |         new = sparsematch(file)
434 |         # Add files that are newly included, or that don't exist in
435 |         # the dirstate yet.
436 |         if (new and not old) or (old and new and not file in dirstate):
437 |             fl = mf.flags(file)
438 |             if repo.wvfs.exists(file):
439 |                 actions[file] = ('e', (fl,), '')
440 |                 lookup.append(file)
441 |             else:
442 |                 actions[file] = ('g', (fl, False), '')
443 |                 added.append(file)
444 |         # Drop files that are newly excluded, or that still exist in
445 |         # the dirstate.
446 |         elif (old and not new) or (not old and not new and file in dirstate):
447 |             dropped.append(file)
448 |             if file not in pending:
449 |                 actions[file] = ('r', [], '')
450 |
451 |     # Verify there are no pending changes in newly included files
452 |     abort = False
453 |     for file in lookup:
454 |         repo.ui.warn(_("pending changes to '%s'\n") % file)
455 |         abort = not force
456 |     if abort:
457 |         raise error.Abort(_('cannot change sparseness due to pending '
458 |                             'changes (delete the files or use '
459 |                             '--force to bring them back dirty)'))
460 |
461 |     # Check for files that were only in the dirstate.
462 |     for file, state in dirstate.iteritems():
463 |         if not file in files:
464 |             old = origsparsematch(file)
465 |             new = sparsematch(file)
466 |             if old and not new:
467 |                 dropped.append(file)
468 |
469 |     # Apply changes to disk
470 |     typeactions = dict((m, []) for m in 'a f g am cd dc r dm dg m e k'.split())
471 |     for f, (m, args, msg) in actions.iteritems():
472 |         if m not in typeactions:
473 |             typeactions[m] = []
474 |         typeactions[m].append((f, args, msg))
475 |
476 |     mergemod.applyupdates(repo, typeactions, repo[None], repo['.'], False)
477 |
478 |     # Fix dirstate
479 |     for file in added:
480 |         dirstate.normal(file)
481 |
482 |     for file in dropped:
483 |         dirstate.drop(file)
484 |
485 |     for file in lookup:
486 |         # File exists on disk, and we're bringing it back in an unknown state.
487 |         dirstate.normallookup(file)
488 |
489 |     return added, dropped, lookup
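
For reference (not part of the change under review): the calling convention refreshwdir() expects, mirroring clearrules() and importfromfiles() below: snapshot status and the old matcher, write the new config, then refresh. The helper itself is hypothetical.

    from mercurial import sparse

    def replacerules(repo, includes, excludes, profiles, force=False):
        with repo.wlock():
            oldstatus = repo.status()
            oldmatch = sparse.matcher(repo)
            sparse.writeconfig(repo, includes, excludes, profiles)
            # returns the (added, dropped, lookup) file lists
            return sparse.refreshwdir(repo, oldstatus, oldmatch, force=force)
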
490 |
491 | def aftercommit(repo, node):
492 |     """Perform actions after a working directory commit."""
493 |     # This function is called unconditionally, even if sparse isn't
494 |     # enabled.
495 |     ctx = repo[node]
496 |
497 |     profiles = patternsforrev(repo, ctx.rev())[2]
498 |
499 |     # profiles will only have data if sparse is enabled.
500 |     if set(profiles) & set(ctx.files()):
501 |         origstatus = repo.status()
502 |         origsparsematch = matcher(repo)
503 |         refreshwdir(repo, origstatus, origsparsematch, force=True)
504 |
505 |     prunetemporaryincludes(repo)
506 |
507 | def clearrules(repo, force=False):
508 |     """Clears include/exclude rules from the sparse config.
509 |
510 |     The remaining sparse config only has profiles, if defined. The working
511 |     directory is refreshed, as needed.
512 |     """
513 |     with repo.wlock():
514 |         raw = repo.vfs.tryread('sparse')
515 |         includes, excludes, profiles = parseconfig(repo.ui, raw)
516 |
517 |         if not includes and not excludes:
518 |             return
519 |
520 |         oldstatus = repo.status()
521 |         oldmatch = matcher(repo)
522 |         writeconfig(repo, set(), set(), profiles)
523 |         refreshwdir(repo, oldstatus, oldmatch, force=force)
524 |
525 | def importfromfiles(repo, opts, paths, force=False):
526 |     """Import sparse config rules from files.
527 |
528 |     The updated sparse config is written out and the working directory
529 |     is refreshed, as needed.
530 |     """
531 |     with repo.wlock():
532 |         # read current configuration
533 |         raw = repo.vfs.tryread('sparse')
534 |         oincludes, oexcludes, oprofiles = parseconfig(repo.ui, raw)
535 |         includes, excludes, profiles = map(
536 |             set, (oincludes, oexcludes, oprofiles))
537 |
538 |         aincludes, aexcludes, aprofiles = activeconfig(repo)
539 |
540 |         # Import rules on top; only take in rules that are not yet
541 |         # part of the active rules.
542 |         changed = False
543 |         for p in paths:
544 |             with util.posixfile(util.expandpath(p)) as fh:
545 |                 raw = fh.read()
546 |
547 |             iincludes, iexcludes, iprofiles = parseconfig(repo.ui, raw)
548 |             oldsize = len(includes) + len(excludes) + len(profiles)
549 |             includes.update(iincludes - aincludes)
550 |             excludes.update(iexcludes - aexcludes)
551 |             profiles.update(set(iprofiles) - aprofiles)
552 |             if len(includes) + len(excludes) + len(profiles) > oldsize:
553 |                 changed = True
554 |
555 |         profilecount = includecount = excludecount = 0
556 |         fcounts = (0, 0, 0)
557 |
558 |         if changed:
559 |             profilecount = len(profiles - aprofiles)
560 |             includecount = len(includes - aincludes)
561 |             excludecount = len(excludes - aexcludes)
562 |
563 |             oldstatus = repo.status()
564 |             oldsparsematch = matcher(repo)
565 |
566 |             # TODO remove this try..except once the matcher integrates better
567 |             # with dirstate. We currently have to write the updated config
568 |             # because that will invalidate the matcher cache and force a
569 |             # re-read. We ideally want to update the cached matcher on the
570 |             # repo instance then flush the new config to disk once wdir is
571 |             # updated. But this requires massive rework to matcher() and its
572 |             # consumers.
573 |             writeconfig(repo, includes, excludes, profiles)
574 |
575 |             try:
576 |                 fcounts = map(
577 |                     len,
578 |                     refreshwdir(repo, oldstatus, oldsparsematch, force=force))
579 |             except Exception:
580 |                 writeconfig(repo, oincludes, oexcludes, oprofiles)
581 |                 raise
582 |
583 |         printchanges(repo.ui, opts, profilecount, includecount, excludecount,
584 |                      *fcounts)
585 |
586 | def updateconfig(repo, pats, opts, include=False, exclude=False, reset=False,
587 |                  delete=False, enableprofile=False, disableprofile=False,
588 |                  force=False):
589 |     """Perform a sparse config update.
590 |
591 |     Only one of the actions may be performed.
592 |
593 |     The new config is written out and a working directory refresh is performed.
594 |     """
595 |     with repo.wlock():
596 |         oldmatcher = matcher(repo)
597 |
598 |         raw = repo.vfs.tryread('sparse')
599 |         oldinclude, oldexclude, oldprofiles = parseconfig(repo.ui, raw)
600 |         oldprofiles = set(oldprofiles)
601 |
-602 |         try:
-603 |             if reset:
-604 |                 newinclude = set()
-605 |                 newexclude = set()
-606 |                 newprofiles = set()
-607 |             else:
-608 |                 newinclude = set(oldinclude)
-609 |                 newexclude = set(oldexclude)
-610 |                 newprofiles = set(oldprofiles)
-611 |
-612 |             oldstatus = repo.status()
-613 |
-614 |             if any(pat.startswith('/') for pat in pats):
-615 |                 repo.ui.warn(_('warning: paths cannot start with /, '
-616 |                                'ignoring: %s\n') %
-617 |                              ([pat for pat in pats if pat.startswith('/')]))
-618 |             elif include:
-619 |                 newinclude.update(pats)
-620 |             elif exclude:
-621 |                 newexclude.update(pats)
-622 |             elif enableprofile:
-623 |                 newprofiles.update(pats)
-624 |             elif disableprofile:
-625 |                 newprofiles.difference_update(pats)
-626 |             elif delete:
-627 |                 newinclude.difference_update(pats)
-628 |                 newexclude.difference_update(pats)
-629 |
-630 |             writeconfig(repo, newinclude, newexclude, newprofiles)
-631 |
-632 |             fcounts = map(
-633 |                 len,
-634 |                 refreshwdir(repo, oldstatus, oldmatcher, force=force))
-635 |
-636 |             profilecount = (len(newprofiles - oldprofiles) -
-637 |                             len(oldprofiles - newprofiles))
-638 |             includecount = (len(newinclude - oldinclude) -
-639 |                             len(oldinclude - newinclude))
-640 |             excludecount = (len(newexclude - oldexclude) -
-641 |                             len(oldexclude - newexclude))
-642 |             printchanges(repo.ui, opts, profilecount, includecount,
-643 |                          excludecount, *fcounts)
-644 |         except Exception:
-645 |             writeconfig(repo, oldinclude, oldexclude, oldprofiles)
-646 |             raise
+602 |         if reset:
+603 |             newinclude = set()
+604 |             newexclude = set()
+605 |             newprofiles = set()
+606 |         else:
+607 |             newinclude = set(oldinclude)
+608 |             newexclude = set(oldexclude)
+609 |             newprofiles = set(oldprofiles)
+610 |
+611 |         oldstatus = repo.status()
+612 |
+613 |         if any(pat.startswith('/') for pat in pats):
+614 |             repo.ui.warn(_('warning: paths cannot start with /, ignoring: %s\n')
+615 |                          % ([pat for pat in pats if pat.startswith('/')]))
+616 |         elif include:
+617 |             newinclude.update(pats)
+618 |         elif exclude:
+619 |             newexclude.update(pats)
+620 |         elif enableprofile:
+621 |             newprofiles.update(pats)
+622 |         elif disableprofile:
+623 |             newprofiles.difference_update(pats)
+624 |         elif delete:
+625 |             newinclude.difference_update(pats)
+626 |             newexclude.difference_update(pats)
+627 |
+628 |         profilecount = (len(newprofiles - oldprofiles) -
+629 |                         len(oldprofiles - newprofiles))
+630 |         includecount = (len(newinclude - oldinclude) -
+631 |                         len(oldinclude - newinclude))
+632 |         excludecount = (len(newexclude - oldexclude) -
+633 |                         len(oldexclude - newexclude))
+634 |
+635 |         # TODO clean up this writeconfig() + try..except pattern once we can.
+636 |         # See comment in importfromfiles() explaining it.
+637 |         writeconfig(repo, newinclude, newexclude, newprofiles)
+638 |
+639 |         try:
+640 |             fcounts = map(
+641 |                 len,
+642 |                 refreshwdir(repo, oldstatus, oldmatcher, force=force))
+643 |
+644 |             printchanges(repo.ui, opts, profilecount, includecount,
+645 |                          excludecount, *fcounts)
+646 |         except Exception:
+647 |             writeconfig(repo, oldinclude, oldexclude, oldprofiles)
+648 |             raise
649 |
650 | def printchanges(ui, opts, profilecount=0, includecount=0, excludecount=0,
651 |                  added=0, dropped=0, conflicting=0):
652 |     """Print output summarizing sparse config changes."""
653 |     with ui.formatter('sparse', opts) as fm:
654 |         fm.startitem()
655 |         fm.condwrite(ui.verbose, 'profiles_added', _('Profiles changed: %d\n'),
656 |                      profilecount)
657 |         fm.condwrite(ui.verbose, 'include_rules_added',
658 |                      _('Include rules changed: %d\n'), includecount)
659 |         fm.condwrite(ui.verbose, 'exclude_rules_added',
660 |                      _('Exclude rules changed: %d\n'), excludecount)
661 |
662 |         # In 'plain' verbose mode, mergemod.applyupdates already outputs what
663 |         # files are added or removed outside of the templating formatter
664 |         # framework. No point in repeating ourselves in that case.
665 |         if not fm.isplain():
666 |             fm.condwrite(ui.verbose, 'files_added', _('Files added: %d\n'),
667 |                          added)
668 |             fm.condwrite(ui.verbose, 'files_dropped', _('Files dropped: %d\n'),
669 |                          dropped)
670 |             fm.condwrite(ui.verbose, 'files_conflicting',
671 |                          _('Files conflicting: %d\n'), conflicting)
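
For reference (not part of the change under review): updateconfig() is the entry point a front end such as the experimental sparse extension drives; exactly one action keyword should be set per call. The command names and patterns below are invented.

    from mercurial import sparse

    def enablewebprofile(ui, repo):
        sparse.updateconfig(repo, ['profiles/webdev'], {}, enableprofile=True)

    def includedocs(ui, repo):
        sparse.updateconfig(repo, ['docs/'], {}, include=True)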