Show More
@@ -1,440 +1,491 b'' | |||||
1 | # sparse.py - allow sparse checkouts of the working directory |
|
1 | # sparse.py - allow sparse checkouts of the working directory | |
2 | # |
|
2 | # | |
3 | # Copyright 2014 Facebook, Inc. |
|
3 | # Copyright 2014 Facebook, Inc. | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | """allow sparse checkouts of the working directory (EXPERIMENTAL) |
|
8 | """allow sparse checkouts of the working directory (EXPERIMENTAL) | |
9 |
|
9 | |||
10 | (This extension is not yet protected by backwards compatibility |
|
10 | (This extension is not yet protected by backwards compatibility | |
11 | guarantees. Any aspect may break in future releases until this |
|
11 | guarantees. Any aspect may break in future releases until this | |
12 | notice is removed.) |
|
12 | notice is removed.) | |
13 |
|
13 | |||
14 | This extension allows the working directory to only consist of a |
|
14 | This extension allows the working directory to only consist of a | |
15 | subset of files for the revision. This allows specific files or |
|
15 | subset of files for the revision. This allows specific files or | |
16 | directories to be explicitly included or excluded. Many repository |
|
16 | directories to be explicitly included or excluded. Many repository | |
17 | operations have performance proportional to the number of files in |
|
17 | operations have performance proportional to the number of files in | |
18 | the working directory. So only realizing a subset of files in the |
|
18 | the working directory. So only realizing a subset of files in the | |
19 | working directory can improve performance. |
|
19 | working directory can improve performance. | |
20 |
|
20 | |||
21 | Sparse Config Files |
|
21 | Sparse Config Files | |
22 | ------------------- |
|
22 | ------------------- | |
23 |
|
23 | |||
24 | The set of files that are part of a sparse checkout are defined by |
|
24 | The set of files that are part of a sparse checkout are defined by | |
25 | a sparse config file. The file defines 3 things: includes (files to |
|
25 | a sparse config file. The file defines 3 things: includes (files to | |
26 | include in the sparse checkout), excludes (files to exclude from the |
|
26 | include in the sparse checkout), excludes (files to exclude from the | |
27 | sparse checkout), and profiles (links to other config files). |
|
27 | sparse checkout), and profiles (links to other config files). | |
28 |
|
28 | |||
29 | The file format is newline delimited. Empty lines and lines beginning |
|
29 | The file format is newline delimited. Empty lines and lines beginning | |
30 | with ``#`` are ignored. |
|
30 | with ``#`` are ignored. | |
31 |
|
31 | |||
32 | Lines beginning with ``%include `` denote another sparse config file |
|
32 | Lines beginning with ``%include `` denote another sparse config file | |
33 | to include. e.g. ``%include tests.sparse``. The filename is relative |
|
33 | to include. e.g. ``%include tests.sparse``. The filename is relative | |
34 | to the repository root. |
|
34 | to the repository root. | |
35 |
|
35 | |||
36 | The special lines ``[include]`` and ``[exclude]`` denote the section |
|
36 | The special lines ``[include]`` and ``[exclude]`` denote the section | |
37 | for includes and excludes that follow, respectively. It is illegal to |
|
37 | for includes and excludes that follow, respectively. It is illegal to | |
38 | have ``[include]`` after ``[exclude]``. |
|
38 | have ``[include]`` after ``[exclude]``. | |
39 |
|
39 | |||
40 | Non-special lines resemble file patterns to be added to either includes |
|
40 | Non-special lines resemble file patterns to be added to either includes | |
41 | or excludes. The syntax of these lines is documented by :hg:`help patterns`. |
|
41 | or excludes. The syntax of these lines is documented by :hg:`help patterns`. | |
42 | Patterns are interpreted as ``glob:`` by default and match against the |
|
42 | Patterns are interpreted as ``glob:`` by default and match against the | |
43 | root of the repository. |
|
43 | root of the repository. | |
44 |
|
44 | |||
45 | Exclusion patterns take precedence over inclusion patterns. So even |
|
45 | Exclusion patterns take precedence over inclusion patterns. So even | |
46 | if a file is explicitly included, an ``[exclude]`` entry can remove it. |
|
46 | if a file is explicitly included, an ``[exclude]`` entry can remove it. | |
47 |
|
47 | |||
48 | For example, say you have a repository with 3 directories, ``frontend/``, |
|
48 | For example, say you have a repository with 3 directories, ``frontend/``, | |
49 | ``backend/``, and ``tools/``. ``frontend/`` and ``backend/`` correspond |
|
49 | ``backend/``, and ``tools/``. ``frontend/`` and ``backend/`` correspond | |
50 | to different projects and it is uncommon for someone working on one |
|
50 | to different projects and it is uncommon for someone working on one | |
51 | to need the files for the other. But ``tools/`` contains files shared |
|
51 | to need the files for the other. But ``tools/`` contains files shared | |
52 | between both projects. Your sparse config files may resemble:: |
|
52 | between both projects. Your sparse config files may resemble:: | |
53 |
|
53 | |||
54 | # frontend.sparse |
|
54 | # frontend.sparse | |
55 | frontend/** |
|
55 | frontend/** | |
56 | tools/** |
|
56 | tools/** | |
57 |
|
57 | |||
58 | # backend.sparse |
|
58 | # backend.sparse | |
59 | backend/** |
|
59 | backend/** | |
60 | tools/** |
|
60 | tools/** | |
61 |
|
61 | |||
62 | Say the backend grows in size. Or there's a directory with thousands |
|
62 | Say the backend grows in size. Or there's a directory with thousands | |
63 | of files you wish to exclude. You can modify the profile to exclude |
|
63 | of files you wish to exclude. You can modify the profile to exclude | |
64 | certain files:: |
|
64 | certain files:: | |
65 |
|
65 | |||
66 | [include] |
|
66 | [include] | |
67 | backend/** |
|
67 | backend/** | |
68 | tools/** |
|
68 | tools/** | |
69 |
|
69 | |||
70 | [exclude] |
|
70 | [exclude] | |
71 | tools/tests/** |
|
71 | tools/tests/** | |
72 | """ |
|
72 | """ | |
73 |
|
73 | |||
74 | from __future__ import absolute_import |
|
74 | from __future__ import absolute_import | |
75 |
|
75 | |||
76 | from mercurial.i18n import _ |
|
76 | from mercurial.i18n import _ | |
77 | from mercurial.pycompat import setattr |
|
77 | from mercurial.pycompat import setattr | |
78 | from mercurial import ( |
|
78 | from mercurial import ( | |
79 | commands, |
|
79 | commands, | |
80 | dirstate, |
|
80 | dirstate, | |
81 | error, |
|
81 | error, | |
82 | extensions, |
|
82 | extensions, | |
83 | logcmdutil, |
|
83 | logcmdutil, | |
84 | match as matchmod, |
|
84 | match as matchmod, | |
85 | merge as mergemod, |
|
85 | merge as mergemod, | |
86 | pycompat, |
|
86 | pycompat, | |
87 | registrar, |
|
87 | registrar, | |
88 | sparse, |
|
88 | sparse, | |
|
89 | subrepo, | |||
|
90 | subrepoutil, | |||
89 | util, |
|
91 | util, | |
90 | ) |
|
92 | ) | |
91 |
|
93 | |||
92 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for |
|
94 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for | |
93 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should |
|
95 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should | |
94 | # be specifying the version(s) of Mercurial they are tested with, or |
|
96 | # be specifying the version(s) of Mercurial they are tested with, or | |
95 | # leave the attribute unspecified. |
|
97 | # leave the attribute unspecified. | |
96 | testedwith = b'ships-with-hg-core' |
|
98 | testedwith = b'ships-with-hg-core' | |
97 |
|
99 | |||
98 | cmdtable = {} |
|
100 | cmdtable = {} | |
99 | command = registrar.command(cmdtable) |
|
101 | command = registrar.command(cmdtable) | |
100 |
|
102 | |||
101 |
|
103 | |||
def extsetup(ui):
    """Extension setup: turn on core sparse support and install all of
    this extension's command flags and function wrappers."""
    sparse.enabled = True

    # Each helper registers its own wrappers; run them all.
    for installer in (
        _setupclone,
        _setuplog,
        _setupadd,
        _setupdirstate,
        _setupsubrepo,
    ):
        installer(ui)
109 |
|
112 | |||
110 |
|
113 | |||
def replacefilecache(cls, propname, replacement):
    """Replace a filecache property with a new class.

    Walks the single-inheritance base chain starting at *cls* and, on the
    first class whose ``__dict__`` defines *propname*, substitutes the
    property with ``replacement(original)``.  This allows changing the
    cache invalidation condition.

    Raises AttributeError when no class in the chain defines *propname*.
    """
    assert callable(replacement)
    current = cls
    while current is not object:
        if propname in vars(current):
            setattr(current, propname, replacement(vars(current)[propname]))
            return
        current = current.__bases__[0]
    raise AttributeError(
        _(b"type '%s' has no property '%s'") % (cls, propname)
    )
127 |
|
130 | |||
128 |
|
131 | |||
def _setuplog(ui):
    """Add a --sparse flag to `hg log` that restricts the listed
    changesets to those touching files inside the sparse checkout."""
    logentry = commands.table[b'log|history']
    sparseflag = (
        b'',
        b'sparse',
        None,
        b"limit to changesets affecting the sparse checkout",
    )
    logentry[1].append(sparseflag)

    def _initialrevs(orig, repo, wopts):
        revs = orig(repo, wopts)
        if not wopts.opts.get(b'sparse'):
            return revs
        sparsematch = sparse.matcher(repo)

        def touchessparse(rev):
            # keep only revisions that modify at least one in-sparse file
            return any(sparsematch(f) for f in repo[rev].files())

        return revs.filter(touchessparse)

    extensions.wrapfunction(logcmdutil, b'_initialrevs', _initialrevs)
153 |
|
156 | |||
154 |
|
157 | |||
def _clonesparsecmd(orig, ui, repo, *args, **opts):
    """Wrapper for `hg clone` that applies --include/--exclude/
    --enable-profile sparse patterns to the fresh working copy.

    At most one of the three sparse flavours may be given.  When --narrow
    is also passed the include/exclude patterns belong to the narrow
    clone, not to sparse, and are left alone.
    """
    # (pattern-list, flavour-name) for each sparse flag the user may pass.
    flavours = [
        (opts.get('include'), 'include'),
        (opts.get('exclude'), 'exclude'),
        (opts.get('enable_profile'), 'enableprofile'),
    ]
    chosen = [(patlist, name) for patlist, name in flavours if patlist]
    if len(chosen) > 1:
        raise error.Abort(_(b"too many flags specified."))
    # if --narrow is passed, it means they are includes and excludes for narrow
    # clone
    if chosen and not opts.get('narrow'):
        pat, mode = chosen[0]

        def clonesparse(orig, ctx, *args, **kwargs):
            sparse.updateconfig(
                ctx.repo().unfiltered(),
                pat,
                {},
                include=(mode == 'include'),
                exclude=(mode == 'exclude'),
                enableprofile=(mode == 'enableprofile'),
                usereporootpaths=True,
            )
            return orig(ctx, *args, **kwargs)

        extensions.wrapfunction(mergemod, b'update', clonesparse)
    return orig(ui, repo, *args, **opts)
190 |
|
193 | |||
191 |
|
194 | |||
def _setupclone(ui):
    """Register the sparse-related flags on `hg clone` and wrap the
    command so they take effect (see _clonesparsecmd)."""
    cloneopts = commands.table[b'clone'][1]
    cloneopts.extend(
        [
            (b'', b'enable-profile', [], b'enable a sparse profile'),
            (b'', b'include', [], b'include sparse pattern'),
            (b'', b'exclude', [], b'exclude sparse pattern'),
        ]
    )
    extensions.wrapcommand(commands.table, b'clone', _clonesparsecmd)
198 |
|
201 | |||
199 |
|
202 | |||
def _setupadd(ui):
    """Add a -s/--sparse flag to `hg add` that also pulls the directories
    of the added files into the sparse configuration."""
    addentry = commands.table[b'add']
    sparseopt = (
        b's',
        b'sparse',
        None,
        b'also include directories of added files in sparse config',
    )
    addentry[1].append(sparseopt)

    def _add(orig, ui, repo, *pats, **opts):
        if opts.get('sparse'):
            # Include the parent directory of every added pattern so the
            # files become part of the sparse checkout.
            dirs = {util.split(pat)[0] for pat in pats}
            sparse.updateconfig(repo, list(dirs), opts, include=True)
        return orig(ui, repo, *pats, **opts)

    extensions.wrapcommand(commands.table, b'add', _add)
221 |
|
224 | |||
222 |
|
225 | |||
def _setupdirstate(ui):
    """Modify the dirstate to prevent stat'ing excluded files,
    and to prevent modifications to files outside the checkout.
    """

    def walk(orig, self, match, subrepos, unknown, ignored, full=True):
        # hack to not exclude explicitly-specified paths so that they can
        # be warned later on e.g. dirstate.add()
        exactmatcher = matchmod.exact(match.files())
        sparseorexact = matchmod.unionmatcher(
            [self._sparsematcher, exactmatcher]
        )
        narrowed = matchmod.intersectmatchers(match, sparseorexact)
        return orig(self, narrowed, subrepos, unknown, ignored, full)

    extensions.wrapfunction(dirstate.dirstate, b'walk', walk)

    # dirstate.rebuild should not add non-matching files
    def _rebuild(orig, self, parent, allfiles, changedfiles=None):
        matcher = self._sparsematcher
        if not matcher.always():
            allfiles = [f for f in allfiles if matcher(f)]
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]
            if changedfiles is not None:
                # In _rebuild, these files will be deleted from the dirstate
                # when they are not found to be in allfiles
                outofsparse = {f for f in self if not matcher(f)}
                changedfiles = outofsparse.union(changedfiles)
        return orig(self, parent, allfiles, changedfiles)

    extensions.wrapfunction(dirstate.dirstate, b'rebuild', _rebuild)

    # Prevent adding files that are outside the sparse checkout
    editfuncs = [
        b'normal',
        b'add',
        b'normallookup',
        b'copy',
        b'remove',
        b'merge',
    ]
    hint = _(
        b'include file with `hg debugsparse --include <pattern>` or use '
        b'`hg add -s <file>` to include file directory while adding'
    )

    # One shared wrapper is enough: it never refers to the wrapped name.
    def _wrapper(orig, self, *args, **kwargs):
        sparsematch = self._sparsematcher
        if not sparsematch.always():
            for f in args:
                if f is not None and not sparsematch(f) and f not in self:
                    raise error.Abort(
                        _(
                            b"cannot add '%s' - it is outside "
                            b"the sparse checkout"
                        )
                        % f,
                        hint=hint,
                    )
        return orig(self, *args, **kwargs)

    for func in editfuncs:
        extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
287 |
|
290 | |||
288 |
|
291 | |||
|
class DummySubrepo(subrepo.abstractsubrepo):
    """Stand-in subrepo for subrepositories filtered out by sparse.

    It reports itself as always clean and turns get()/remove() into
    no-ops, so working-copy operations skip over subrepo paths that fall
    outside the sparse checkout.
    """

    def dirty(self, ignoreupdate=False, missing=False):
        # Never dirty: nothing is materialized on disk to compare against.
        return False

    def get(self, state, overwrite=False):
        # No-op: do not check the subrepo out into the working copy.
        return

    def remove(self):
        # No-op: nothing was checked out, so there is nothing to remove.
        return
|
305 | ||||
|
306 | ||||
|
def _setupsubrepo(ui):
    """Teach subrepo handling about sparse checkouts.

    Subrepositories whose path falls outside the sparse configuration
    are replaced with DummySubrepo so they are neither examined nor
    materialized in the working copy.
    """

    def _state(orig, ctx, ui):
        # Filter the full substate down to the paths selected by sparse.
        sparsematch = sparse.matcher(ctx.repo(), revs=[ctx.rev()])
        if sparsematch.always():
            return orig(ctx, ui)
        allstate = orig(ctx, ui)
        return {
            path: item
            for path, item in allstate.items()
            if sparsematch(path)
        }

    # NOTE(review): deliberately left disabled.  Filtering the substate
    # outright is an alternative to the DummySubrepo approach below;
    # confirm which strategy is wanted before enabling this wrapper.
    # extensions.wrapfunction(subrepoutil, b'state', _state)

    def _subrepo(orig, ctx, path, allowwdir=False, allowcreate=True):
        # Provide DummySubrepo for paths outside the sparse checkout.
        sparsematch = sparse.matcher(ctx.repo(), revs=[ctx.rev()])
        if not sparsematch.always() and not sparsematch(path):
            return DummySubrepo(ctx, path)
        return orig(ctx, path, allowwdir, allowcreate)

    extensions.wrapfunction(subrepo, b'subrepo', _subrepo)
|
338 | ||||
|
339 | ||||
@command(
    b'debugsparse',
    [
        (b'I', b'include', False, _(b'include files in the sparse checkout')),
        (b'X', b'exclude', False, _(b'exclude files in the sparse checkout')),
        (b'd', b'delete', False, _(b'delete an include/exclude rule')),
        (
            b'f',
            b'force',
            False,
            _(b'allow changing rules even with pending changes'),
        ),
        (b'', b'enable-profile', False, _(b'enables the specified profile')),
        (b'', b'disable-profile', False, _(b'disables the specified profile')),
        (b'', b'import-rules', False, _(b'imports rules from a file')),
        (b'', b'clear-rules', False, _(b'clears local include/exclude rules')),
        (
            b'',
            b'refresh',
            False,
            _(b'updates the working after sparseness changes'),
        ),
        (b'', b'reset', False, _(b'makes the repo full again')),
    ]
    + commands.templateopts,
    _(b'[--OPTION] PATTERN...'),
    helpbasic=True,
)
def debugsparse(ui, repo, *pats, **opts):
    """make the current checkout sparse, or edit the existing checkout

    The sparse command is used to make the current checkout sparse.
    This means files that don't meet the sparse condition will not be
    written to disk, or show up in any working copy operations. It does
    not affect files in history in any way.

    Passing no arguments prints the currently applied sparse rules.

    --include and --exclude are used to add and remove files from the sparse
    checkout. The effects of adding an include or exclude rule are applied
    immediately. If applying the new rule would cause a file with pending
    changes to be added or removed, the command will fail. Pass --force to
    force a rule change even with pending changes (the changes on disk will
    be preserved).

    --delete removes an existing include/exclude rule. The effects are
    immediate.

    --refresh refreshes the files on disk based on the sparse rules. This is
    only necessary if .hg/sparse was changed by hand.

    --enable-profile and --disable-profile accept a path to a .hgsparse file.
    This allows defining sparse checkouts and tracking them inside the
    repository. This is useful for defining commonly used sparse checkouts for
    many people to use. As the profile definition changes over time, the sparse
    checkout will automatically be updated appropriately, depending on which
    changeset is checked out. Changes to .hgsparse are not applied until they
    have been committed.

    --import-rules accepts a path to a file containing rules in the .hgsparse
    format, allowing you to add --include, --exclude and --enable-profile rules
    in bulk. Like the --include, --exclude and --enable-profile switches, the
    changes are applied immediately.

    --clear-rules removes all local include and exclude rules, while leaving
    any enabled profiles in place.

    Returns 0 if editing the sparse checkout succeeds.
    """
    opts = pycompat.byteskwargs(opts)
    include = opts.get(b'include')
    exclude = opts.get(b'exclude')
    force = opts.get(b'force')
    enableprofile = opts.get(b'enable_profile')
    disableprofile = opts.get(b'disable_profile')
    importrules = opts.get(b'import_rules')
    clearrules = opts.get(b'clear_rules')
    delete = opts.get(b'delete')
    refresh = opts.get(b'refresh')
    reset = opts.get(b'reset')
    # The action flags are mutually exclusive.
    count = sum(
        [
            include,
            exclude,
            enableprofile,
            disableprofile,
            delete,
            importrules,
            refresh,
            clearrules,
            reset,
        ]
    )
    if count > 1:
        raise error.Abort(_(b"too many flags specified"))

    if count == 0:
        # No action requested: print the active rules, or abort if this
        # repository is not sparse at all.
        if repo.vfs.exists(b'sparse'):
            ui.status(repo.vfs.read(b"sparse") + b"\n")
            temporaryincludes = sparse.readtemporaryincludes(repo)
            if temporaryincludes:
                ui.status(
                    _(b"Temporarily Included Files (for merge/rebase):\n")
                )
                ui.status((b"\n".join(temporaryincludes) + b"\n"))
            return
        else:
            raise error.Abort(
                _(
                    b'the debugsparse command is only supported on'
                    b' sparse repositories'
                )
            )

    if include or exclude or delete or reset or enableprofile or disableprofile:
        sparse.updateconfig(
            repo,
            pats,
            opts,
            include=include,
            exclude=exclude,
            reset=reset,
            delete=delete,
            enableprofile=enableprofile,
            disableprofile=disableprofile,
            force=force,
        )

    if importrules:
        sparse.importfromfiles(repo, opts, pats, force=force)

    if clearrules:
        sparse.clearrules(repo, force=force)

    if refresh:
        # Acquire the lock *before* the try block: if repo.wlock() raises,
        # a finally clause referencing `wlock` would otherwise fail with a
        # NameError that masks the real error.
        wlock = repo.wlock()
        try:
            # map() is lazy on Python 3; materialize it so the counts can
            # be indexed below.
            fcounts = list(
                map(
                    len,
                    sparse.refreshwdir(
                        repo, repo.status(), sparse.matcher(repo), force=force
                    ),
                )
            )
            sparse.printchanges(
                ui,
                opts,
                added=fcounts[0],
                dropped=fcounts[1],
                conflicting=fcounts[2],
            )
        finally:
            wlock.release()
@@ -1,510 +1,528 b'' | |||||
1 | # subrepoutil.py - sub-repository operations and substate handling |
|
1 | # subrepoutil.py - sub-repository operations and substate handling | |
2 | # |
|
2 | # | |
3 | # Copyright 2009-2010 Olivia Mackall <olivia@selenic.com> |
|
3 | # Copyright 2009-2010 Olivia Mackall <olivia@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import errno |
|
10 | import errno | |
11 | import os |
|
11 | import os | |
12 | import posixpath |
|
12 | import posixpath | |
13 | import re |
|
13 | import re | |
14 |
|
14 | |||
15 | from .i18n import _ |
|
15 | from .i18n import _ | |
16 | from .pycompat import getattr |
|
16 | from .pycompat import getattr | |
17 | from . import ( |
|
17 | from . import ( | |
18 | config, |
|
18 | config, | |
19 | error, |
|
19 | error, | |
20 | filemerge, |
|
20 | filemerge, | |
21 | pathutil, |
|
21 | pathutil, | |
22 | phases, |
|
22 | phases, | |
23 | pycompat, |
|
23 | pycompat, | |
|
24 | sparse, | |||
24 | util, |
|
25 | util, | |
25 | ) |
|
26 | ) | |
26 | from .utils import ( |
|
27 | from .utils import ( | |
27 | stringutil, |
|
28 | stringutil, | |
28 | urlutil, |
|
29 | urlutil, | |
29 | ) |
|
30 | ) | |
30 |
|
31 | |||
31 | nullstate = (b'', b'', b'empty') |
|
32 | nullstate = (b'', b'', b'empty') | |
32 |
|
33 | |||
33 | if pycompat.TYPE_CHECKING: |
|
34 | if pycompat.TYPE_CHECKING: | |
34 | from typing import ( |
|
35 | from typing import ( | |
35 | Any, |
|
36 | Any, | |
36 | Dict, |
|
37 | Dict, | |
37 | List, |
|
38 | List, | |
38 | Optional, |
|
39 | Optional, | |
39 | Set, |
|
40 | Set, | |
40 | Tuple, |
|
41 | Tuple, | |
41 | ) |
|
42 | ) | |
42 | from . import ( |
|
43 | from . import ( | |
43 | context, |
|
44 | context, | |
44 | localrepo, |
|
45 | localrepo, | |
45 | match as matchmod, |
|
46 | match as matchmod, | |
46 | scmutil, |
|
47 | scmutil, | |
47 | subrepo, |
|
48 | subrepo, | |
48 | ui as uimod, |
|
49 | ui as uimod, | |
49 | ) |
|
50 | ) | |
50 |
|
51 | |||
51 | Substate = Dict[bytes, Tuple[bytes, bytes, bytes]] |
|
52 | Substate = Dict[bytes, Tuple[bytes, bytes, bytes]] | |
52 |
|
53 | |||
53 |
|
54 | |||
54 | def state(ctx, ui): |
|
55 | def state(ctx, ui): | |
55 | # type: (context.changectx, uimod.ui) -> Substate |
|
56 | # type: (context.changectx, uimod.ui) -> Substate | |
56 | """return a state dict, mapping subrepo paths configured in .hgsub |
|
57 | """return a state dict, mapping subrepo paths configured in .hgsub | |
57 | to tuple: (source from .hgsub, revision from .hgsubstate, kind |
|
58 | to tuple: (source from .hgsub, revision from .hgsubstate, kind | |
58 | (key in types dict)) |
|
59 | (key in types dict)) | |
59 | """ |
|
60 | """ | |
60 | p = config.config() |
|
61 | p = config.config() | |
61 | repo = ctx.repo() |
|
62 | repo = ctx.repo() | |
62 |
|
63 | |||
63 | def read(f, sections=None, remap=None): |
|
64 | def read(f, sections=None, remap=None): | |
64 | if f in ctx: |
|
65 | if f in ctx: | |
65 | try: |
|
66 | try: | |
66 | data = ctx[f].data() |
|
67 | data = ctx[f].data() | |
67 | except IOError as err: |
|
68 | except IOError as err: | |
68 | if err.errno != errno.ENOENT: |
|
69 | if err.errno != errno.ENOENT: | |
69 | raise |
|
70 | raise | |
70 | # handle missing subrepo spec files as removed |
|
71 | # handle missing subrepo spec files as removed | |
71 | ui.warn( |
|
72 | ui.warn( | |
72 | _(b"warning: subrepo spec file \'%s\' not found\n") |
|
73 | _(b"warning: subrepo spec file \'%s\' not found\n") | |
73 | % repo.pathto(f) |
|
74 | % repo.pathto(f) | |
74 | ) |
|
75 | ) | |
75 | return |
|
76 | return | |
76 | p.parse(f, data, sections, remap, read) |
|
77 | p.parse(f, data, sections, remap, read) | |
77 | else: |
|
78 | else: | |
78 | raise error.Abort( |
|
79 | raise error.Abort( | |
79 | _(b"subrepo spec file \'%s\' not found") % repo.pathto(f) |
|
80 | _(b"subrepo spec file \'%s\' not found") % repo.pathto(f) | |
80 | ) |
|
81 | ) | |
81 |
|
82 | |||
82 | if b'.hgsub' in ctx: |
|
83 | if b'.hgsub' in ctx: | |
83 | read(b'.hgsub') |
|
84 | read(b'.hgsub') | |
84 |
|
85 | |||
85 | for path, src in ui.configitems(b'subpaths'): |
|
86 | for path, src in ui.configitems(b'subpaths'): | |
86 | p.set(b'subpaths', path, src, ui.configsource(b'subpaths', path)) |
|
87 | p.set(b'subpaths', path, src, ui.configsource(b'subpaths', path)) | |
87 |
|
88 | |||
88 | rev = {} |
|
89 | rev = {} | |
89 | if b'.hgsubstate' in ctx: |
|
90 | if b'.hgsubstate' in ctx: | |
90 | try: |
|
91 | try: | |
91 | for i, l in enumerate(ctx[b'.hgsubstate'].data().splitlines()): |
|
92 | for i, l in enumerate(ctx[b'.hgsubstate'].data().splitlines()): | |
92 | l = l.lstrip() |
|
93 | l = l.lstrip() | |
93 | if not l: |
|
94 | if not l: | |
94 | continue |
|
95 | continue | |
95 | try: |
|
96 | try: | |
96 | revision, path = l.split(b" ", 1) |
|
97 | revision, path = l.split(b" ", 1) | |
97 | except ValueError: |
|
98 | except ValueError: | |
98 | raise error.Abort( |
|
99 | raise error.Abort( | |
99 | _( |
|
100 | _( | |
100 | b"invalid subrepository revision " |
|
101 | b"invalid subrepository revision " | |
101 | b"specifier in \'%s\' line %d" |
|
102 | b"specifier in \'%s\' line %d" | |
102 | ) |
|
103 | ) | |
103 | % (repo.pathto(b'.hgsubstate'), (i + 1)) |
|
104 | % (repo.pathto(b'.hgsubstate'), (i + 1)) | |
104 | ) |
|
105 | ) | |
105 | rev[path] = revision |
|
106 | rev[path] = revision | |
106 | except IOError as err: |
|
107 | except IOError as err: | |
107 | if err.errno != errno.ENOENT: |
|
108 | if err.errno != errno.ENOENT: | |
108 | raise |
|
109 | raise | |
109 |
|
110 | |||
110 | def remap(src): |
|
111 | def remap(src): | |
111 | # type: (bytes) -> bytes |
|
112 | # type: (bytes) -> bytes | |
112 | for pattern, repl in p.items(b'subpaths'): |
|
113 | for pattern, repl in p.items(b'subpaths'): | |
113 | # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub |
|
114 | # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub | |
114 | # does a string decode. |
|
115 | # does a string decode. | |
115 | repl = stringutil.escapestr(repl) |
|
116 | repl = stringutil.escapestr(repl) | |
116 | # However, we still want to allow back references to go |
|
117 | # However, we still want to allow back references to go | |
117 | # through unharmed, so we turn r'\\1' into r'\1'. Again, |
|
118 | # through unharmed, so we turn r'\\1' into r'\1'. Again, | |
118 | # extra escapes are needed because re.sub string decodes. |
|
119 | # extra escapes are needed because re.sub string decodes. | |
119 | repl = re.sub(br'\\\\([0-9]+)', br'\\\1', repl) |
|
120 | repl = re.sub(br'\\\\([0-9]+)', br'\\\1', repl) | |
120 | try: |
|
121 | try: | |
121 | src = re.sub(pattern, repl, src, 1) |
|
122 | src = re.sub(pattern, repl, src, 1) | |
122 | except re.error as e: |
|
123 | except re.error as e: | |
123 | raise error.Abort( |
|
124 | raise error.Abort( | |
124 | _(b"bad subrepository pattern in %s: %s") |
|
125 | _(b"bad subrepository pattern in %s: %s") | |
125 | % ( |
|
126 | % ( | |
126 | p.source(b'subpaths', pattern), |
|
127 | p.source(b'subpaths', pattern), | |
127 | stringutil.forcebytestr(e), |
|
128 | stringutil.forcebytestr(e), | |
128 | ) |
|
129 | ) | |
129 | ) |
|
130 | ) | |
130 | return src |
|
131 | return src | |
131 |
|
132 | |||
132 | state = {} |
|
133 | state = {} | |
133 | for path, src in p.items(b''): # type: bytes |
|
134 | for path, src in p.items(b''): # type: bytes | |
134 | kind = b'hg' |
|
135 | kind = b'hg' | |
135 | if src.startswith(b'['): |
|
136 | if src.startswith(b'['): | |
136 | if b']' not in src: |
|
137 | if b']' not in src: | |
137 | raise error.Abort(_(b'missing ] in subrepository source')) |
|
138 | raise error.Abort(_(b'missing ] in subrepository source')) | |
138 | kind, src = src.split(b']', 1) |
|
139 | kind, src = src.split(b']', 1) | |
139 | kind = kind[1:] |
|
140 | kind = kind[1:] | |
140 | src = src.lstrip() # strip any extra whitespace after ']' |
|
141 | src = src.lstrip() # strip any extra whitespace after ']' | |
141 |
|
142 | |||
142 | if not urlutil.url(src).isabs(): |
|
143 | if not urlutil.url(src).isabs(): | |
143 | parent = _abssource(repo, abort=False) |
|
144 | parent = _abssource(repo, abort=False) | |
144 | if parent: |
|
145 | if parent: | |
145 | parent = urlutil.url(parent) |
|
146 | parent = urlutil.url(parent) | |
146 | parent.path = posixpath.join(parent.path or b'', src) |
|
147 | parent.path = posixpath.join(parent.path or b'', src) | |
147 | parent.path = posixpath.normpath(parent.path) |
|
148 | parent.path = posixpath.normpath(parent.path) | |
148 | joined = bytes(parent) |
|
149 | joined = bytes(parent) | |
149 | # Remap the full joined path and use it if it changes, |
|
150 | # Remap the full joined path and use it if it changes, | |
150 | # else remap the original source. |
|
151 | # else remap the original source. | |
151 | remapped = remap(joined) |
|
152 | remapped = remap(joined) | |
152 | if remapped == joined: |
|
153 | if remapped == joined: | |
153 | src = remap(src) |
|
154 | src = remap(src) | |
154 | else: |
|
155 | else: | |
155 | src = remapped |
|
156 | src = remapped | |
156 |
|
157 | |||
157 | src = remap(src) |
|
158 | src = remap(src) | |
158 | state[util.pconvert(path)] = (src.strip(), rev.get(path, b''), kind) |
|
159 | state[util.pconvert(path)] = (src.strip(), rev.get(path, b''), kind) | |
159 |
|
160 | |||
160 | return state |
|
161 | return state | |
161 |
|
162 | |||
162 |
|
163 | |||
163 | def writestate(repo, state): |
|
164 | def writestate(repo, state): | |
164 | # type: (localrepo.localrepository, Substate) -> None |
|
165 | # type: (localrepo.localrepository, Substate) -> None | |
165 | """rewrite .hgsubstate in (outer) repo with these subrepo states""" |
|
166 | """rewrite .hgsubstate in (outer) repo with these subrepo states""" | |
166 | lines = [ |
|
167 | lines = [ | |
167 | b'%s %s\n' % (state[s][1], s) |
|
168 | b'%s %s\n' % (state[s][1], s) | |
168 | for s in sorted(state) |
|
169 | for s in sorted(state) | |
169 | if state[s][1] != nullstate[1] |
|
170 | if state[s][1] != nullstate[1] | |
170 | ] |
|
171 | ] | |
171 | repo.wwrite(b'.hgsubstate', b''.join(lines), b'') |
|
172 | repo.wwrite(b'.hgsubstate', b''.join(lines), b'') | |
172 |
|
173 | |||
173 |
|
174 | |||
174 | def submerge(repo, wctx, mctx, actx, overwrite, labels=None): |
|
175 | def submerge(repo, wctx, mctx, actx, overwrite, labels=None): | |
175 | # type: (localrepo.localrepository, context.workingctx, context.changectx, context.changectx, bool, Optional[Any]) -> Substate |
|
176 | # type: (localrepo.localrepository, context.workingctx, context.changectx, context.changectx, bool, Optional[Any]) -> Substate | |
176 | # TODO: type the `labels` arg |
|
177 | # TODO: type the `labels` arg | |
177 | """delegated from merge.applyupdates: merging of .hgsubstate file |
|
178 | """delegated from merge.applyupdates: merging of .hgsubstate file | |
178 | in working context, merging context and ancestor context""" |
|
179 | in working context, merging context and ancestor context""" | |
179 | if mctx == actx: # backwards? |
|
180 | if mctx == actx: # backwards? | |
180 | actx = wctx.p1() |
|
181 | actx = wctx.p1() | |
181 | s1 = wctx.substate |
|
182 | s1 = wctx.substate | |
182 | s2 = mctx.substate |
|
183 | s2 = mctx.substate | |
183 | sa = actx.substate |
|
184 | sa = actx.substate | |
184 | sm = {} |
|
185 | sm = {} | |
185 |
|
186 | |||
|
187 | s1match = sparse.matcher(repo, revs=[wctx.rev()]) | |||
|
188 | s2match = sparse.matcher(repo, revs=[mctx.rev()]) | |||
|
189 | ||||
186 | repo.ui.debug(b"subrepo merge %s %s %s\n" % (wctx, mctx, actx)) |
|
190 | repo.ui.debug(b"subrepo merge %s %s %s\n" % (wctx, mctx, actx)) | |
187 |
|
191 | |||
188 | def debug(s, msg, r=b""): |
|
192 | def debug(s, msg, r=b""): | |
189 | if r: |
|
193 | if r: | |
190 | r = b"%s:%s:%s" % r |
|
194 | r = b"%s:%s:%s" % r | |
191 | repo.ui.debug(b" subrepo %s: %s %s\n" % (s, msg, r)) |
|
195 | repo.ui.debug(b" subrepo %s: %s %s\n" % (s, msg, r)) | |
192 |
|
196 | |||
193 | promptssrc = filemerge.partextras(labels) |
|
197 | promptssrc = filemerge.partextras(labels) | |
194 | for s, l in sorted(pycompat.iteritems(s1)): |
|
198 | for s, l in sorted(pycompat.iteritems(s1)): | |
|
199 | if not s1match(s): | |||
|
200 | sm[s] = l # ignore changes out of sparse | |||
|
201 | continue | |||
195 | a = sa.get(s, nullstate) |
|
202 | a = sa.get(s, nullstate) | |
196 | ld = l # local state with possible dirty flag for compares |
|
203 | ld = l # local state with possible dirty flag for compares | |
197 | if wctx.sub(s).dirty(): |
|
204 | if wctx.sub(s).dirty(): | |
198 | ld = (l[0], l[1] + b"+") |
|
205 | ld = (l[0], l[1] + b"+") | |
199 | if wctx == actx: # overwrite |
|
206 | if wctx == actx: # overwrite | |
200 | a = ld |
|
207 | a = ld | |
201 |
|
208 | |||
202 | prompts = promptssrc.copy() |
|
209 | prompts = promptssrc.copy() | |
203 | prompts[b's'] = s |
|
210 | prompts[b's'] = s | |
204 | if s in s2: |
|
211 | if s in s2: | |
|
212 | if not s2match(s): | |||
|
213 | sm[s] = l # ignore changes out of sparse | |||
|
214 | continue | |||
205 | r = s2[s] |
|
215 | r = s2[s] | |
206 | if ld == r or r == a: # no change or local is newer |
|
216 | if ld == r or r == a: # no change or local is newer | |
207 | sm[s] = l |
|
217 | sm[s] = l | |
208 | continue |
|
218 | continue | |
209 | elif ld == a: # other side changed |
|
219 | elif ld == a: # other side changed | |
210 | debug(s, b"other changed, get", r) |
|
220 | debug(s, b"other changed, get", r) | |
211 | wctx.sub(s).get(r, overwrite) |
|
221 | wctx.sub(s).get(r, overwrite) | |
212 | sm[s] = r |
|
222 | sm[s] = r | |
213 | elif ld[0] != r[0]: # sources differ |
|
223 | elif ld[0] != r[0]: # sources differ | |
214 | prompts[b'lo'] = l[0] |
|
224 | prompts[b'lo'] = l[0] | |
215 | prompts[b'ro'] = r[0] |
|
225 | prompts[b'ro'] = r[0] | |
216 | if repo.ui.promptchoice( |
|
226 | if repo.ui.promptchoice( | |
217 | _( |
|
227 | _( | |
218 | b' subrepository sources for %(s)s differ\n' |
|
228 | b' subrepository sources for %(s)s differ\n' | |
219 | b'you can use (l)ocal%(l)s source (%(lo)s)' |
|
229 | b'you can use (l)ocal%(l)s source (%(lo)s)' | |
220 | b' or (r)emote%(o)s source (%(ro)s).\n' |
|
230 | b' or (r)emote%(o)s source (%(ro)s).\n' | |
221 | b'what do you want to do?' |
|
231 | b'what do you want to do?' | |
222 | b'$$ &Local $$ &Remote' |
|
232 | b'$$ &Local $$ &Remote' | |
223 | ) |
|
233 | ) | |
224 | % prompts, |
|
234 | % prompts, | |
225 | 0, |
|
235 | 0, | |
226 | ): |
|
236 | ): | |
227 | debug(s, b"prompt changed, get", r) |
|
237 | debug(s, b"prompt changed, get", r) | |
228 | wctx.sub(s).get(r, overwrite) |
|
238 | wctx.sub(s).get(r, overwrite) | |
229 | sm[s] = r |
|
239 | sm[s] = r | |
230 | elif ld[1] == a[1]: # local side is unchanged |
|
240 | elif ld[1] == a[1]: # local side is unchanged | |
231 | debug(s, b"other side changed, get", r) |
|
241 | debug(s, b"other side changed, get", r) | |
232 | wctx.sub(s).get(r, overwrite) |
|
242 | wctx.sub(s).get(r, overwrite) | |
233 | sm[s] = r |
|
243 | sm[s] = r | |
234 | else: |
|
244 | else: | |
235 | debug(s, b"both sides changed") |
|
245 | debug(s, b"both sides changed") | |
236 | srepo = wctx.sub(s) |
|
246 | srepo = wctx.sub(s) | |
237 | prompts[b'sl'] = srepo.shortid(l[1]) |
|
247 | prompts[b'sl'] = srepo.shortid(l[1]) | |
238 | prompts[b'sr'] = srepo.shortid(r[1]) |
|
248 | prompts[b'sr'] = srepo.shortid(r[1]) | |
239 | option = repo.ui.promptchoice( |
|
249 | option = repo.ui.promptchoice( | |
240 | _( |
|
250 | _( | |
241 | b' subrepository %(s)s diverged (local revision: %(sl)s, ' |
|
251 | b' subrepository %(s)s diverged (local revision: %(sl)s, ' | |
242 | b'remote revision: %(sr)s)\n' |
|
252 | b'remote revision: %(sr)s)\n' | |
243 | b'you can (m)erge, keep (l)ocal%(l)s or keep ' |
|
253 | b'you can (m)erge, keep (l)ocal%(l)s or keep ' | |
244 | b'(r)emote%(o)s.\n' |
|
254 | b'(r)emote%(o)s.\n' | |
245 | b'what do you want to do?' |
|
255 | b'what do you want to do?' | |
246 | b'$$ &Merge $$ &Local $$ &Remote' |
|
256 | b'$$ &Merge $$ &Local $$ &Remote' | |
247 | ) |
|
257 | ) | |
248 | % prompts, |
|
258 | % prompts, | |
249 | 0, |
|
259 | 0, | |
250 | ) |
|
260 | ) | |
251 | if option == 0: |
|
261 | if option == 0: | |
252 | wctx.sub(s).merge(r) |
|
262 | wctx.sub(s).merge(r) | |
253 | sm[s] = l |
|
263 | sm[s] = l | |
254 | debug(s, b"merge with", r) |
|
264 | debug(s, b"merge with", r) | |
255 | elif option == 1: |
|
265 | elif option == 1: | |
256 | sm[s] = l |
|
266 | sm[s] = l | |
257 | debug(s, b"keep local subrepo revision", l) |
|
267 | debug(s, b"keep local subrepo revision", l) | |
258 | else: |
|
268 | else: | |
259 | wctx.sub(s).get(r, overwrite) |
|
269 | wctx.sub(s).get(r, overwrite) | |
260 | sm[s] = r |
|
270 | sm[s] = r | |
261 | debug(s, b"get remote subrepo revision", r) |
|
271 | debug(s, b"get remote subrepo revision", r) | |
262 | elif ld == a: # remote removed, local unchanged |
|
272 | elif ld == a: # remote removed, local unchanged | |
263 | debug(s, b"remote removed, remove") |
|
273 | debug(s, b"remote removed, remove") | |
264 | wctx.sub(s).remove() |
|
274 | wctx.sub(s).remove() | |
265 | elif a == nullstate: # not present in remote or ancestor |
|
275 | elif a == nullstate: # not present in remote or ancestor | |
266 | debug(s, b"local added, keep") |
|
276 | debug(s, b"local added, keep") | |
267 | sm[s] = l |
|
277 | sm[s] = l | |
268 | continue |
|
278 | continue | |
269 | else: |
|
279 | else: | |
270 | if repo.ui.promptchoice( |
|
280 | if repo.ui.promptchoice( | |
271 | _( |
|
281 | _( | |
272 | b' local%(l)s changed subrepository %(s)s' |
|
282 | b' local%(l)s changed subrepository %(s)s' | |
273 | b' which remote%(o)s removed\n' |
|
283 | b' which remote%(o)s removed\n' | |
274 | b'use (c)hanged version or (d)elete?' |
|
284 | b'use (c)hanged version or (d)elete?' | |
275 | b'$$ &Changed $$ &Delete' |
|
285 | b'$$ &Changed $$ &Delete' | |
276 | ) |
|
286 | ) | |
277 | % prompts, |
|
287 | % prompts, | |
278 | 0, |
|
288 | 0, | |
279 | ): |
|
289 | ): | |
280 | debug(s, b"prompt remove") |
|
290 | debug(s, b"prompt remove") | |
281 | wctx.sub(s).remove() |
|
291 | wctx.sub(s).remove() | |
282 |
|
292 | |||
283 | for s, r in sorted(s2.items()): |
|
293 | for s, r in sorted(s2.items()): | |
284 | if s in s1: |
|
294 | if s in s1: | |
285 | continue |
|
295 | continue | |
286 | elif s not in sa: |
|
296 | elif s not in sa: | |
287 | debug(s, b"remote added, get", r) |
|
297 | debug(s, b"remote added, get", r) | |
288 | mctx.sub(s).get(r) |
|
298 | mctx.sub(s).get(r) | |
289 | sm[s] = r |
|
299 | sm[s] = r | |
290 | elif r != sa[s]: |
|
300 | elif r != sa[s]: | |
|
301 | if not s2match(s): | |||
|
302 | # ignore changes out of sparse, | |||
|
303 | continue | |||
|
304 | elif not s1match(s): | |||
|
305 | # recreate changes out of sparse, | |||
|
306 | # sm[s] = r | |||
|
307 | continue | |||
|
308 | ||||
291 | prompts = promptssrc.copy() |
|
309 | prompts = promptssrc.copy() | |
292 | prompts[b's'] = s |
|
310 | prompts[b's'] = s | |
293 | if ( |
|
311 | if ( | |
294 | repo.ui.promptchoice( |
|
312 | repo.ui.promptchoice( | |
295 | _( |
|
313 | _( | |
296 | b' remote%(o)s changed subrepository %(s)s' |
|
314 | b' remote%(o)s changed subrepository %(s)s' | |
297 | b' which local%(l)s removed\n' |
|
315 | b' which local%(l)s removed\n' | |
298 | b'use (c)hanged version or (d)elete?' |
|
316 | b'use (c)hanged version or (d)elete?' | |
299 | b'$$ &Changed $$ &Delete' |
|
317 | b'$$ &Changed $$ &Delete' | |
300 | ) |
|
318 | ) | |
301 | % prompts, |
|
319 | % prompts, | |
302 | 0, |
|
320 | 0, | |
303 | ) |
|
321 | ) | |
304 | == 0 |
|
322 | == 0 | |
305 | ): |
|
323 | ): | |
306 | debug(s, b"prompt recreate", r) |
|
324 | debug(s, b"prompt recreate", r) | |
307 | mctx.sub(s).get(r) |
|
325 | mctx.sub(s).get(r) | |
308 | sm[s] = r |
|
326 | sm[s] = r | |
309 |
|
327 | |||
310 | # record merged .hgsubstate |
|
328 | # record merged .hgsubstate | |
311 | writestate(repo, sm) |
|
329 | writestate(repo, sm) | |
312 | return sm |
|
330 | return sm | |
313 |
|
331 | |||
314 |
|
332 | |||
315 | def precommit(ui, wctx, status, match, force=False): |
|
333 | def precommit(ui, wctx, status, match, force=False): | |
316 | # type: (uimod.ui, context.workingcommitctx, scmutil.status, matchmod.basematcher, bool) -> Tuple[List[bytes], Set[bytes], Substate] |
|
334 | # type: (uimod.ui, context.workingcommitctx, scmutil.status, matchmod.basematcher, bool) -> Tuple[List[bytes], Set[bytes], Substate] | |
317 | """Calculate .hgsubstate changes that should be applied before committing |
|
335 | """Calculate .hgsubstate changes that should be applied before committing | |
318 |
|
336 | |||
319 | Returns (subs, commitsubs, newstate) where |
|
337 | Returns (subs, commitsubs, newstate) where | |
320 | - subs: changed subrepos (including dirty ones) |
|
338 | - subs: changed subrepos (including dirty ones) | |
321 | - commitsubs: dirty subrepos which the caller needs to commit recursively |
|
339 | - commitsubs: dirty subrepos which the caller needs to commit recursively | |
322 | - newstate: new state dict which the caller must write to .hgsubstate |
|
340 | - newstate: new state dict which the caller must write to .hgsubstate | |
323 |
|
341 | |||
324 | This also updates the given status argument. |
|
342 | This also updates the given status argument. | |
325 | """ |
|
343 | """ | |
326 | subs = [] |
|
344 | subs = [] | |
327 | commitsubs = set() |
|
345 | commitsubs = set() | |
328 | newstate = wctx.substate.copy() |
|
346 | newstate = wctx.substate.copy() | |
329 |
|
347 | |||
330 | # only manage subrepos and .hgsubstate if .hgsub is present |
|
348 | # only manage subrepos and .hgsubstate if .hgsub is present | |
331 | if b'.hgsub' in wctx: |
|
349 | if b'.hgsub' in wctx: | |
332 | # we'll decide whether to track this ourselves, thanks |
|
350 | # we'll decide whether to track this ourselves, thanks | |
333 | for c in status.modified, status.added, status.removed: |
|
351 | for c in status.modified, status.added, status.removed: | |
334 | if b'.hgsubstate' in c: |
|
352 | if b'.hgsubstate' in c: | |
335 | c.remove(b'.hgsubstate') |
|
353 | c.remove(b'.hgsubstate') | |
336 |
|
354 | |||
337 | # compare current state to last committed state |
|
355 | # compare current state to last committed state | |
338 | # build new substate based on last committed state |
|
356 | # build new substate based on last committed state | |
339 | oldstate = wctx.p1().substate |
|
357 | oldstate = wctx.p1().substate | |
340 | for s in sorted(newstate.keys()): |
|
358 | for s in sorted(newstate.keys()): | |
341 | if not match(s): |
|
359 | if not match(s): | |
342 | # ignore working copy, use old state if present |
|
360 | # ignore working copy, use old state if present | |
343 | if s in oldstate: |
|
361 | if s in oldstate: | |
344 | newstate[s] = oldstate[s] |
|
362 | newstate[s] = oldstate[s] | |
345 | continue |
|
363 | continue | |
346 | if not force: |
|
364 | if not force: | |
347 | raise error.Abort( |
|
365 | raise error.Abort( | |
348 | _(b"commit with new subrepo %s excluded") % s |
|
366 | _(b"commit with new subrepo %s excluded") % s | |
349 | ) |
|
367 | ) | |
350 | dirtyreason = wctx.sub(s).dirtyreason(True) |
|
368 | dirtyreason = wctx.sub(s).dirtyreason(True) | |
351 | if dirtyreason: |
|
369 | if dirtyreason: | |
352 | if not ui.configbool(b'ui', b'commitsubrepos'): |
|
370 | if not ui.configbool(b'ui', b'commitsubrepos'): | |
353 | raise error.Abort( |
|
371 | raise error.Abort( | |
354 | dirtyreason, |
|
372 | dirtyreason, | |
355 | hint=_(b"use --subrepos for recursive commit"), |
|
373 | hint=_(b"use --subrepos for recursive commit"), | |
356 | ) |
|
374 | ) | |
357 | subs.append(s) |
|
375 | subs.append(s) | |
358 | commitsubs.add(s) |
|
376 | commitsubs.add(s) | |
359 | else: |
|
377 | else: | |
360 | bs = wctx.sub(s).basestate() |
|
378 | bs = wctx.sub(s).basestate() | |
361 | newstate[s] = (newstate[s][0], bs, newstate[s][2]) |
|
379 | newstate[s] = (newstate[s][0], bs, newstate[s][2]) | |
362 | if oldstate.get(s, (None, None, None))[1] != bs: |
|
380 | if oldstate.get(s, (None, None, None))[1] != bs: | |
363 | subs.append(s) |
|
381 | subs.append(s) | |
364 |
|
382 | |||
365 | # check for removed subrepos |
|
383 | # check for removed subrepos | |
366 | for p in wctx.parents(): |
|
384 | for p in wctx.parents(): | |
367 | r = [s for s in p.substate if s not in newstate] |
|
385 | r = [s for s in p.substate if s not in newstate] | |
368 | subs += [s for s in r if match(s)] |
|
386 | subs += [s for s in r if match(s)] | |
369 | if subs: |
|
387 | if subs: | |
370 | if not match(b'.hgsub') and b'.hgsub' in ( |
|
388 | if not match(b'.hgsub') and b'.hgsub' in ( | |
371 | wctx.modified() + wctx.added() |
|
389 | wctx.modified() + wctx.added() | |
372 | ): |
|
390 | ): | |
373 | raise error.Abort(_(b"can't commit subrepos without .hgsub")) |
|
391 | raise error.Abort(_(b"can't commit subrepos without .hgsub")) | |
374 | status.modified.insert(0, b'.hgsubstate') |
|
392 | status.modified.insert(0, b'.hgsubstate') | |
375 |
|
393 | |||
376 | elif b'.hgsub' in status.removed: |
|
394 | elif b'.hgsub' in status.removed: | |
377 | # clean up .hgsubstate when .hgsub is removed |
|
395 | # clean up .hgsubstate when .hgsub is removed | |
378 | if b'.hgsubstate' in wctx and b'.hgsubstate' not in ( |
|
396 | if b'.hgsubstate' in wctx and b'.hgsubstate' not in ( | |
379 | status.modified + status.added + status.removed |
|
397 | status.modified + status.added + status.removed | |
380 | ): |
|
398 | ): | |
381 | status.removed.insert(0, b'.hgsubstate') |
|
399 | status.removed.insert(0, b'.hgsubstate') | |
382 |
|
400 | |||
383 | return subs, commitsubs, newstate |
|
401 | return subs, commitsubs, newstate | |
384 |
|
402 | |||
385 |
|
403 | |||
386 | def repo_rel_or_abs_source(repo): |
|
404 | def repo_rel_or_abs_source(repo): | |
387 | """return the source of this repo |
|
405 | """return the source of this repo | |
388 |
|
406 | |||
389 | Either absolute or relative the outermost repo""" |
|
407 | Either absolute or relative the outermost repo""" | |
390 | parent = repo |
|
408 | parent = repo | |
391 | chunks = [] |
|
409 | chunks = [] | |
392 | while util.safehasattr(parent, b'_subparent'): |
|
410 | while util.safehasattr(parent, b'_subparent'): | |
393 | source = urlutil.url(parent._subsource) |
|
411 | source = urlutil.url(parent._subsource) | |
394 | chunks.append(bytes(source)) |
|
412 | chunks.append(bytes(source)) | |
395 | if source.isabs(): |
|
413 | if source.isabs(): | |
396 | break |
|
414 | break | |
397 | parent = parent._subparent |
|
415 | parent = parent._subparent | |
398 |
|
416 | |||
399 | chunks.reverse() |
|
417 | chunks.reverse() | |
400 | path = posixpath.join(*chunks) |
|
418 | path = posixpath.join(*chunks) | |
401 | return posixpath.normpath(path) |
|
419 | return posixpath.normpath(path) | |
402 |
|
420 | |||
403 |
|
421 | |||
404 | def reporelpath(repo): |
|
422 | def reporelpath(repo): | |
405 | # type: (localrepo.localrepository) -> bytes |
|
423 | # type: (localrepo.localrepository) -> bytes | |
406 | """return path to this (sub)repo as seen from outermost repo""" |
|
424 | """return path to this (sub)repo as seen from outermost repo""" | |
407 | parent = repo |
|
425 | parent = repo | |
408 | while util.safehasattr(parent, b'_subparent'): |
|
426 | while util.safehasattr(parent, b'_subparent'): | |
409 | parent = parent._subparent |
|
427 | parent = parent._subparent | |
410 | return repo.root[len(pathutil.normasprefix(parent.root)) :] |
|
428 | return repo.root[len(pathutil.normasprefix(parent.root)) :] | |
411 |
|
429 | |||
412 |
|
430 | |||
413 | def subrelpath(sub): |
|
431 | def subrelpath(sub): | |
414 | # type: (subrepo.abstractsubrepo) -> bytes |
|
432 | # type: (subrepo.abstractsubrepo) -> bytes | |
415 | """return path to this subrepo as seen from outermost repo""" |
|
433 | """return path to this subrepo as seen from outermost repo""" | |
416 | return sub._relpath |
|
434 | return sub._relpath | |
417 |
|
435 | |||
418 |
|
436 | |||
419 | def _abssource(repo, push=False, abort=True): |
|
437 | def _abssource(repo, push=False, abort=True): | |
420 | # type: (localrepo.localrepository, bool, bool) -> Optional[bytes] |
|
438 | # type: (localrepo.localrepository, bool, bool) -> Optional[bytes] | |
421 | """return pull/push path of repo - either based on parent repo .hgsub info |
|
439 | """return pull/push path of repo - either based on parent repo .hgsub info | |
422 | or on the top repo config. Abort or return None if no source found.""" |
|
440 | or on the top repo config. Abort or return None if no source found.""" | |
423 | if util.safehasattr(repo, b'_subparent'): |
|
441 | if util.safehasattr(repo, b'_subparent'): | |
424 | source = urlutil.url(repo._subsource) |
|
442 | source = urlutil.url(repo._subsource) | |
425 | if source.isabs(): |
|
443 | if source.isabs(): | |
426 | return bytes(source) |
|
444 | return bytes(source) | |
427 | source.path = posixpath.normpath(source.path) |
|
445 | source.path = posixpath.normpath(source.path) | |
428 | parent = _abssource(repo._subparent, push, abort=False) |
|
446 | parent = _abssource(repo._subparent, push, abort=False) | |
429 | if parent: |
|
447 | if parent: | |
430 | parent = urlutil.url(util.pconvert(parent)) |
|
448 | parent = urlutil.url(util.pconvert(parent)) | |
431 | parent.path = posixpath.join(parent.path or b'', source.path) |
|
449 | parent.path = posixpath.join(parent.path or b'', source.path) | |
432 | parent.path = posixpath.normpath(parent.path) |
|
450 | parent.path = posixpath.normpath(parent.path) | |
433 | return bytes(parent) |
|
451 | return bytes(parent) | |
434 | else: # recursion reached top repo |
|
452 | else: # recursion reached top repo | |
435 | path = None |
|
453 | path = None | |
436 | if util.safehasattr(repo, b'_subtoppath'): |
|
454 | if util.safehasattr(repo, b'_subtoppath'): | |
437 | path = repo._subtoppath |
|
455 | path = repo._subtoppath | |
438 | elif push and repo.ui.config(b'paths', b'default-push'): |
|
456 | elif push and repo.ui.config(b'paths', b'default-push'): | |
439 | path = repo.ui.config(b'paths', b'default-push') |
|
457 | path = repo.ui.config(b'paths', b'default-push') | |
440 | elif repo.ui.config(b'paths', b'default'): |
|
458 | elif repo.ui.config(b'paths', b'default'): | |
441 | path = repo.ui.config(b'paths', b'default') |
|
459 | path = repo.ui.config(b'paths', b'default') | |
442 | elif repo.shared(): |
|
460 | elif repo.shared(): | |
443 | # chop off the .hg component to get the default path form. This has |
|
461 | # chop off the .hg component to get the default path form. This has | |
444 | # already run through vfsmod.vfs(..., realpath=True), so it doesn't |
|
462 | # already run through vfsmod.vfs(..., realpath=True), so it doesn't | |
445 | # have problems with 'C:' |
|
463 | # have problems with 'C:' | |
446 | return os.path.dirname(repo.sharedpath) |
|
464 | return os.path.dirname(repo.sharedpath) | |
447 | if path: |
|
465 | if path: | |
448 | # issue5770: 'C:\' and 'C:' are not equivalent paths. The former is |
|
466 | # issue5770: 'C:\' and 'C:' are not equivalent paths. The former is | |
449 | # as expected: an absolute path to the root of the C: drive. The |
|
467 | # as expected: an absolute path to the root of the C: drive. The | |
450 | # latter is a relative path, and works like so: |
|
468 | # latter is a relative path, and works like so: | |
451 | # |
|
469 | # | |
452 | # C:\>cd C:\some\path |
|
470 | # C:\>cd C:\some\path | |
453 | # C:\>D: |
|
471 | # C:\>D: | |
454 | # D:\>python -c "import os; print os.path.abspath('C:')" |
|
472 | # D:\>python -c "import os; print os.path.abspath('C:')" | |
455 | # C:\some\path |
|
473 | # C:\some\path | |
456 | # |
|
474 | # | |
457 | # D:\>python -c "import os; print os.path.abspath('C:relative')" |
|
475 | # D:\>python -c "import os; print os.path.abspath('C:relative')" | |
458 | # C:\some\path\relative |
|
476 | # C:\some\path\relative | |
459 | if urlutil.hasdriveletter(path): |
|
477 | if urlutil.hasdriveletter(path): | |
460 | if len(path) == 2 or path[2:3] not in br'\/': |
|
478 | if len(path) == 2 or path[2:3] not in br'\/': | |
461 | path = os.path.abspath(path) |
|
479 | path = os.path.abspath(path) | |
462 | return path |
|
480 | return path | |
463 |
|
481 | |||
464 | if abort: |
|
482 | if abort: | |
465 | raise error.Abort(_(b"default path for subrepository not found")) |
|
483 | raise error.Abort(_(b"default path for subrepository not found")) | |
466 |
|
484 | |||
467 |
|
485 | |||
def newcommitphase(ui, ctx):
    # type: (uimod.ui, context.changectx) -> int
    """Return the phase to use for a new commit of ``ctx``.

    Starts from the configured new-commit phase and, when ``ctx`` carries
    subrepo state, reconciles it with the phases of the referenced subrepo
    revisions according to the ``phases.checksubrepos`` setting:

    - ``ignore``: keep the configured phase unchanged
    - ``follow``: bump the commit to the most secret subrepo phase, with a
      warning
    - ``abort``: refuse to commit when a subrepo phase is more secret than
      the configured commit phase

    Raises ``error.Abort`` for an invalid configuration value, or in
    ``abort`` mode when a conflicting subrepo phase is found.
    """
    commitphase = phases.newcommitphase(ui)
    substate = getattr(ctx, "substate", None)
    if not substate:
        # No subrepos involved: nothing to reconcile.
        return commitphase

    check = ui.config(b'phases', b'checksubrepos')
    if check not in (b'ignore', b'follow', b'abort'):
        raise error.Abort(
            _(b'invalid phases.checksubrepos configuration: %s') % check
        )
    if check == b'ignore':
        return commitphase

    # Scan subrepos (in sorted order, so the reported one is deterministic)
    # for the most secret phase among the referenced revisions.
    maxphase, maxsub = phases.public, None
    for subpath in sorted(substate):
        subphase = ctx.sub(subpath).phase(substate[subpath][1])
        if subphase > maxphase:
            maxphase, maxsub = subphase, subpath

    if maxphase <= commitphase:
        # No subrepo is more secret than the configured phase.
        return commitphase

    if check == b'abort':
        raise error.Abort(
            _(
                b"can't commit in %s phase"
                b" conflicting %s from subrepository %s"
            )
            % (
                phases.phasenames[commitphase],
                phases.phasenames[maxphase],
                maxsub,
            )
        )
    # 'follow' mode: warn, then adopt the subrepo's more secret phase.
    ui.warn(
        _(
            b"warning: changes are committed in"
            b" %s phase from subrepository %s\n"
        )
        % (phases.phasenames[maxphase], maxsub)
    )
    return maxphase
General Comments 0
You need to be logged in to leave comments.
Login now