Show More
@@ -1,440 +1,491 b'' | |||
|
1 | 1 | # sparse.py - allow sparse checkouts of the working directory |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2014 Facebook, Inc. |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | """allow sparse checkouts of the working directory (EXPERIMENTAL) |
|
9 | 9 | |
|
10 | 10 | (This extension is not yet protected by backwards compatibility |
|
11 | 11 | guarantees. Any aspect may break in future releases until this |
|
12 | 12 | notice is removed.) |
|
13 | 13 | |
|
14 | 14 | This extension allows the working directory to only consist of a |
|
15 | 15 | subset of files for the revision. This allows specific files or |
|
16 | 16 | directories to be explicitly included or excluded. Many repository |
|
17 | 17 | operations have performance proportional to the number of files in |
|
18 | 18 | the working directory. So only realizing a subset of files in the |
|
19 | 19 | working directory can improve performance. |
|
20 | 20 | |
|
21 | 21 | Sparse Config Files |
|
22 | 22 | ------------------- |
|
23 | 23 | |
|
24 | 24 | The set of files that are part of a sparse checkout are defined by |
|
25 | 25 | a sparse config file. The file defines 3 things: includes (files to |
|
26 | 26 | include in the sparse checkout), excludes (files to exclude from the |
|
27 | 27 | sparse checkout), and profiles (links to other config files). |
|
28 | 28 | |
|
29 | 29 | The file format is newline delimited. Empty lines and lines beginning |
|
30 | 30 | with ``#`` are ignored. |
|
31 | 31 | |
|
32 | 32 | Lines beginning with ``%include `` denote another sparse config file |
|
33 | 33 | to include. e.g. ``%include tests.sparse``. The filename is relative |
|
34 | 34 | to the repository root. |
|
35 | 35 | |
|
36 | 36 | The special lines ``[include]`` and ``[exclude]`` denote the section |
|
37 | 37 | for includes and excludes that follow, respectively. It is illegal to |
|
38 | 38 | have ``[include]`` after ``[exclude]``. |
|
39 | 39 | |
|
40 | 40 | Non-special lines resemble file patterns to be added to either includes |
|
41 | 41 | or excludes. The syntax of these lines is documented by :hg:`help patterns`. |
|
42 | 42 | Patterns are interpreted as ``glob:`` by default and match against the |
|
43 | 43 | root of the repository. |
|
44 | 44 | |
|
45 | 45 | Exclusion patterns take precedence over inclusion patterns. So even |
|
46 | 46 | if a file is explicitly included, an ``[exclude]`` entry can remove it. |
|
47 | 47 | |
|
48 | 48 | For example, say you have a repository with 3 directories, ``frontend/``, |
|
49 | 49 | ``backend/``, and ``tools/``. ``frontend/`` and ``backend/`` correspond |
|
50 | 50 | to different projects and it is uncommon for someone working on one |
|
51 | 51 | to need the files for the other. But ``tools/`` contains files shared |
|
52 | 52 | between both projects. Your sparse config files may resemble:: |
|
53 | 53 | |
|
54 | 54 | # frontend.sparse |
|
55 | 55 | frontend/** |
|
56 | 56 | tools/** |
|
57 | 57 | |
|
58 | 58 | # backend.sparse |
|
59 | 59 | backend/** |
|
60 | 60 | tools/** |
|
61 | 61 | |
|
62 | 62 | Say the backend grows in size. Or there's a directory with thousands |
|
63 | 63 | of files you wish to exclude. You can modify the profile to exclude |
|
64 | 64 | certain files:: |
|
65 | 65 | |
|
66 | 66 | [include] |
|
67 | 67 | backend/** |
|
68 | 68 | tools/** |
|
69 | 69 | |
|
70 | 70 | [exclude] |
|
71 | 71 | tools/tests/** |
|
72 | 72 | """ |
|
73 | 73 | |
|
74 | 74 | from __future__ import absolute_import |
|
75 | 75 | |
|
76 | 76 | from mercurial.i18n import _ |
|
77 | 77 | from mercurial.pycompat import setattr |
|
78 | 78 | from mercurial import ( |
|
79 | 79 | commands, |
|
80 | 80 | dirstate, |
|
81 | 81 | error, |
|
82 | 82 | extensions, |
|
83 | 83 | logcmdutil, |
|
84 | 84 | match as matchmod, |
|
85 | 85 | merge as mergemod, |
|
86 | 86 | pycompat, |
|
87 | 87 | registrar, |
|
88 | 88 | sparse, |
|
89 | subrepo, | |
|
90 | subrepoutil, | |
|
89 | 91 | util, |
|
90 | 92 | ) |
|
91 | 93 | |
|
92 | 94 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for |
|
93 | 95 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should |
|
94 | 96 | # be specifying the version(s) of Mercurial they are tested with, or |
|
95 | 97 | # leave the attribute unspecified. |
|
96 | 98 | testedwith = b'ships-with-hg-core' |
|
97 | 99 | |
|
98 | 100 | cmdtable = {} |
|
99 | 101 | command = registrar.command(cmdtable) |
|
100 | 102 | |
|
101 | 103 | |
|
def extsetup(ui):
    """Activate core sparse support and install this extension's
    command-line flags and function wrappers."""
    # Flip the core feature switch first so every wrapper installed
    # below observes an enabled sparse implementation.
    sparse.enabled = True

    # Same registration order as before: clone, log, add, dirstate, subrepo.
    for install in (
        _setupclone,
        _setuplog,
        _setupadd,
        _setupdirstate,
        _setupsubrepo,
    ):
        install(ui)
|
109 | 112 | |
|
110 | 113 | |
|
def replacefilecache(cls, propname, replacement):
    """Replace a filecache property with a new class.

    Walks the single-inheritance chain starting at ``cls`` and, on the
    first class whose ``__dict__`` defines ``propname``, swaps the stored
    descriptor for ``replacement(original)``.  This allows changing the
    cache invalidation condition.  Raises ``AttributeError`` when no class
    in the chain defines the property.
    """
    assert callable(replacement)
    klass = cls
    while klass is not object:
        if propname in klass.__dict__:
            original = klass.__dict__[propname]
            setattr(klass, propname, replacement(original))
            return
        # filecache properties live on single-inheritance classes, so
        # following only the first base is sufficient.
        klass = klass.__bases__[0]

    raise AttributeError(
        _(b"type '%s' has no property '%s'") % (cls, propname)
    )
|
127 | 130 | |
|
128 | 131 | |
|
def _setuplog(ui):
    # Add a --sparse flag to `hg log` that restricts the revision set to
    # changesets touching at least one file inside the sparse checkout.
    entry = commands.table[b'log|history']
    entry[1].append(
        (
            b'',
            b'sparse',
            None,
            b"limit to changesets affecting the sparse checkout",
        )
    )

    def _initialrevs(orig, repo, wopts):
        # Compute the normal starting revset first, then narrow it only
        # when --sparse was passed on the command line.
        revs = orig(repo, wopts)
        if wopts.opts.get(b'sparse'):
            sparsematch = sparse.matcher(repo)

            def ctxmatch(rev):
                # True when the changeset modifies any file matched by
                # the active sparse configuration.
                ctx = repo[rev]
                return any(f for f in ctx.files() if sparsematch(f))

            revs = revs.filter(ctxmatch)
        return revs

    extensions.wrapfunction(logcmdutil, b'_initialrevs', _initialrevs)
|
153 | 156 | |
|
154 | 157 | |
|
def _clonesparsecmd(orig, ui, repo, *args, **opts):
    # Wrapper for `hg clone` handling the sparse flags added by
    # _setupclone().  At most one of --include/--exclude/--enable-profile
    # may be given; the chosen pattern list is applied to the fresh clone
    # just before its initial working-copy update.
    include_pat = opts.get('include')
    exclude_pat = opts.get('exclude')
    enableprofile_pat = opts.get('enable_profile')
    narrow_pat = opts.get('narrow')
    include = exclude = enableprofile = False
    if include_pat:
        pat = include_pat
        include = True
    if exclude_pat:
        pat = exclude_pat
        exclude = True
    if enableprofile_pat:
        pat = enableprofile_pat
        enableprofile = True
    if sum([include, exclude, enableprofile]) > 1:
        raise error.Abort(_(b"too many flags specified."))
    # if --narrow is passed, it means they are includes and excludes for narrow
    # clone
    if not narrow_pat and (include or exclude or enableprofile):

        def clonesparse(orig, ctx, *args, **kwargs):
            # Runs once during the clone's initial update; `pat` and the
            # mode booleans are captured from the enclosing scope.
            sparse.updateconfig(
                ctx.repo().unfiltered(),
                pat,
                {},
                include=include,
                exclude=exclude,
                enableprofile=enableprofile,
                usereporootpaths=True,
            )
            return orig(ctx, *args, **kwargs)

        extensions.wrapfunction(mergemod, b'update', clonesparse)
    return orig(ui, repo, *args, **opts)
|
190 | 193 | |
|
191 | 194 | |
|
def _setupclone(ui):
    """Teach `hg clone` the sparse-related command-line flags."""
    flags = commands.table[b'clone'][1]
    flags.append((b'', b'enable-profile', [], b'enable a sparse profile'))
    flags.append((b'', b'include', [], b'include sparse pattern'))
    flags.append((b'', b'exclude', [], b'exclude sparse pattern'))
    extensions.wrapcommand(commands.table, b'clone', _clonesparsecmd)
|
198 | 201 | |
|
199 | 202 | |
|
def _setupadd(ui):
    """Add an ``-s/--sparse`` flag to `hg add` that also records the
    directories of the added files in the sparse configuration."""
    entry = commands.table[b'add']
    entry[1].append(
        (
            b's',
            b'sparse',
            None,
            b'also include directories of added files in sparse config',
        )
    )

    def _add(orig, ui, repo, *pats, **opts):
        if opts.get('sparse'):
            # Include the parent directory of every added path so the
            # new files remain visible inside the sparse checkout.
            dirs = {util.split(pat)[0] for pat in pats}
            sparse.updateconfig(repo, list(dirs), opts, include=True)
        return orig(ui, repo, *pats, **opts)

    extensions.wrapcommand(commands.table, b'add', _add)
|
221 | 224 | |
|
222 | 225 | |
|
def _setupdirstate(ui):
    """Modify the dirstate to prevent stat'ing excluded files,
    and to prevent modifications to files outside the checkout.
    """

    def walk(orig, self, match, subrepos, unknown, ignored, full=True):
        # hack to not exclude explicitly-specified paths so that they can
        # be warned later on e.g. dirstate.add()
        em = matchmod.exact(match.files())
        sm = matchmod.unionmatcher([self._sparsematcher, em])
        match = matchmod.intersectmatchers(match, sm)
        return orig(self, match, subrepos, unknown, ignored, full)

    extensions.wrapfunction(dirstate.dirstate, b'walk', walk)

    # dirstate.rebuild should not add non-matching files
    def _rebuild(orig, self, parent, allfiles, changedfiles=None):
        matcher = self._sparsematcher
        if not matcher.always():
            # Drop paths outside the sparse checkout from both lists.
            allfiles = [f for f in allfiles if matcher(f)]
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]

            if changedfiles is not None:
                # In _rebuild, these files will be deleted from the dirstate
                # when they are not found to be in allfiles
                dirstatefilestoremove = {f for f in self if not matcher(f)}
                changedfiles = dirstatefilestoremove.union(changedfiles)

        return orig(self, parent, allfiles, changedfiles)

    extensions.wrapfunction(dirstate.dirstate, b'rebuild', _rebuild)

    # Prevent adding files that are outside the sparse checkout
    editfuncs = [
        b'normal',
        b'add',
        b'normallookup',
        b'copy',
        b'remove',
        b'merge',
    ]
    hint = _(
        b'include file with `hg debugsparse --include <pattern>` or use '
        + b'`hg add -s <file>` to include file directory while adding'
    )
    for func in editfuncs:

        def _wrapper(orig, self, *args, **kwargs):
            # Abort any dirstate mutation targeting a path outside the
            # sparse checkout, unless that path is already tracked.
            sparsematch = self._sparsematcher
            if not sparsematch.always():
                for f in args:
                    if f is not None and not sparsematch(f) and f not in self:
                        raise error.Abort(
                            _(
                                b"cannot add '%s' - it is outside "
                                b"the sparse checkout"
                            )
                            % f,
                            hint=hint,
                        )
            return orig(self, *args, **kwargs)

        extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
|
287 | 290 | |
|
288 | 291 | |
|
class DummySubrepo(subrepo.abstractsubrepo):
    """Stand-in for a subrepo whose path is filtered out of the sparse
    checkout.

    It reports itself as always clean and turns get/remove into no-ops,
    so excluded subrepositories are left untouched on disk.
    """

    def dirty(self, ignoreupdate=False, missing=False):
        # An excluded subrepo never reports pending changes.
        return False

    def get(self, state, overwrite=False):
        # No-op: nothing is checked out for an excluded subrepo.
        return

    def remove(self):
        # No-op: nothing on disk to remove for an excluded subrepo.
        return
|
305 | ||
|
306 | ||
|
def _setupsubrepo(ui):
    """Make subrepo handling respect the sparse configuration.

    Subrepositories whose path falls outside the sparse checkout are
    replaced with a no-op :class:`DummySubrepo`, so they are neither
    updated nor reported as dirty.
    """
    # NOTE: an earlier draft also filtered subrepoutil.state() here; that
    # dead, never-registered helper has been removed.  Filtering now
    # happens solely by substituting DummySubrepo at construction time.

    def _subrepo(orig, ctx, path, allowwdir=False, allowcreate=True):
        sparsematch = sparse.matcher(ctx.repo(), revs=[ctx.rev()])
        if not sparsematch.always() and not sparsematch(path):
            # Path is excluded from the sparse checkout: hand back an
            # inert subrepo instead of instantiating the real one.
            return DummySubrepo(ctx, path)
        return orig(ctx, path, allowwdir, allowcreate)

    extensions.wrapfunction(subrepo, b'subrepo', _subrepo)
|
338 | ||
|
339 | ||
|
@command(
    b'debugsparse',
    [
        (b'I', b'include', False, _(b'include files in the sparse checkout')),
        (b'X', b'exclude', False, _(b'exclude files in the sparse checkout')),
        (b'd', b'delete', False, _(b'delete an include/exclude rule')),
        (
            b'f',
            b'force',
            False,
            _(b'allow changing rules even with pending changes'),
        ),
        (b'', b'enable-profile', False, _(b'enables the specified profile')),
        (b'', b'disable-profile', False, _(b'disables the specified profile')),
        (b'', b'import-rules', False, _(b'imports rules from a file')),
        (b'', b'clear-rules', False, _(b'clears local include/exclude rules')),
        (
            b'',
            b'refresh',
            False,
            _(b'updates the working after sparseness changes'),
        ),
        (b'', b'reset', False, _(b'makes the repo full again')),
    ]
    + commands.templateopts,
    _(b'[--OPTION] PATTERN...'),
    helpbasic=True,
)
def debugsparse(ui, repo, *pats, **opts):
    """make the current checkout sparse, or edit the existing checkout

    The sparse command is used to make the current checkout sparse.
    This means files that don't meet the sparse condition will not be
    written to disk, or show up in any working copy operations. It does
    not affect files in history in any way.

    Passing no arguments prints the currently applied sparse rules.

    --include and --exclude are used to add and remove files from the sparse
    checkout. The effects of adding an include or exclude rule are applied
    immediately. If applying the new rule would cause a file with pending
    changes to be added or removed, the command will fail. Pass --force to
    force a rule change even with pending changes (the changes on disk will
    be preserved).

    --delete removes an existing include/exclude rule. The effects are
    immediate.

    --refresh refreshes the files on disk based on the sparse rules. This is
    only necessary if .hg/sparse was changed by hand.

    --enable-profile and --disable-profile accept a path to a .hgsparse file.
    This allows defining sparse checkouts and tracking them inside the
    repository. This is useful for defining commonly used sparse checkouts for
    many people to use. As the profile definition changes over time, the sparse
    checkout will automatically be updated appropriately, depending on which
    changeset is checked out. Changes to .hgsparse are not applied until they
    have been committed.

    --import-rules accepts a path to a file containing rules in the .hgsparse
    format, allowing you to add --include, --exclude and --enable-profile rules
    in bulk. Like the --include, --exclude and --enable-profile switches, the
    changes are applied immediately.

    --clear-rules removes all local include and exclude rules, while leaving
    any enabled profiles in place.

    Returns 0 if editing the sparse checkout succeeds.
    """
    opts = pycompat.byteskwargs(opts)
    include = opts.get(b'include')
    exclude = opts.get(b'exclude')
    force = opts.get(b'force')
    enableprofile = opts.get(b'enable_profile')
    disableprofile = opts.get(b'disable_profile')
    importrules = opts.get(b'import_rules')
    clearrules = opts.get(b'clear_rules')
    delete = opts.get(b'delete')
    refresh = opts.get(b'refresh')
    reset = opts.get(b'reset')
    # The mode flags are mutually exclusive; booleans sum as 0/1.
    count = sum(
        [
            include,
            exclude,
            enableprofile,
            disableprofile,
            delete,
            importrules,
            refresh,
            clearrules,
            reset,
        ]
    )
    if count > 1:
        raise error.Abort(_(b"too many flags specified"))

    if count == 0:
        # No mode flag: display the current sparse configuration.
        if repo.vfs.exists(b'sparse'):
            ui.status(repo.vfs.read(b"sparse") + b"\n")
            temporaryincludes = sparse.readtemporaryincludes(repo)
            if temporaryincludes:
                ui.status(
                    _(b"Temporarily Included Files (for merge/rebase):\n")
                )
                ui.status((b"\n".join(temporaryincludes) + b"\n"))
            return
        else:
            raise error.Abort(
                _(
                    b'the debugsparse command is only supported on'
                    b' sparse repositories'
                )
            )

    if include or exclude or delete or reset or enableprofile or disableprofile:
        sparse.updateconfig(
            repo,
            pats,
            opts,
            include=include,
            exclude=exclude,
            reset=reset,
            delete=delete,
            enableprofile=enableprofile,
            disableprofile=disableprofile,
            force=force,
        )

    if importrules:
        sparse.importfromfiles(repo, opts, pats, force=force)

    if clearrules:
        sparse.clearrules(repo, force=force)

    if refresh:
        try:
            wlock = repo.wlock()
            # Fix: wrap in list() — indexing a bare map object below
            # raises TypeError on Python 3.
            fcounts = list(
                map(
                    len,
                    sparse.refreshwdir(
                        repo, repo.status(), sparse.matcher(repo), force=force
                    ),
                )
            )
            sparse.printchanges(
                ui,
                opts,
                added=fcounts[0],
                dropped=fcounts[1],
                conflicting=fcounts[2],
            )
        finally:
            wlock.release()
@@ -1,510 +1,528 b'' | |||
|
1 | 1 | # subrepoutil.py - sub-repository operations and substate handling |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2009-2010 Olivia Mackall <olivia@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | import errno |
|
11 | 11 | import os |
|
12 | 12 | import posixpath |
|
13 | 13 | import re |
|
14 | 14 | |
|
15 | 15 | from .i18n import _ |
|
16 | 16 | from .pycompat import getattr |
|
17 | 17 | from . import ( |
|
18 | 18 | config, |
|
19 | 19 | error, |
|
20 | 20 | filemerge, |
|
21 | 21 | pathutil, |
|
22 | 22 | phases, |
|
23 | 23 | pycompat, |
|
24 | sparse, | |
|
24 | 25 | util, |
|
25 | 26 | ) |
|
26 | 27 | from .utils import ( |
|
27 | 28 | stringutil, |
|
28 | 29 | urlutil, |
|
29 | 30 | ) |
|
30 | 31 | |
|
31 | 32 | nullstate = (b'', b'', b'empty') |
|
32 | 33 | |
|
33 | 34 | if pycompat.TYPE_CHECKING: |
|
34 | 35 | from typing import ( |
|
35 | 36 | Any, |
|
36 | 37 | Dict, |
|
37 | 38 | List, |
|
38 | 39 | Optional, |
|
39 | 40 | Set, |
|
40 | 41 | Tuple, |
|
41 | 42 | ) |
|
42 | 43 | from . import ( |
|
43 | 44 | context, |
|
44 | 45 | localrepo, |
|
45 | 46 | match as matchmod, |
|
46 | 47 | scmutil, |
|
47 | 48 | subrepo, |
|
48 | 49 | ui as uimod, |
|
49 | 50 | ) |
|
50 | 51 | |
|
51 | 52 | Substate = Dict[bytes, Tuple[bytes, bytes, bytes]] |
|
52 | 53 | |
|
53 | 54 | |
|
54 | 55 | def state(ctx, ui): |
|
55 | 56 | # type: (context.changectx, uimod.ui) -> Substate |
|
56 | 57 | """return a state dict, mapping subrepo paths configured in .hgsub |
|
57 | 58 | to tuple: (source from .hgsub, revision from .hgsubstate, kind |
|
58 | 59 | (key in types dict)) |
|
59 | 60 | """ |
|
60 | 61 | p = config.config() |
|
61 | 62 | repo = ctx.repo() |
|
62 | 63 | |
|
63 | 64 | def read(f, sections=None, remap=None): |
|
64 | 65 | if f in ctx: |
|
65 | 66 | try: |
|
66 | 67 | data = ctx[f].data() |
|
67 | 68 | except IOError as err: |
|
68 | 69 | if err.errno != errno.ENOENT: |
|
69 | 70 | raise |
|
70 | 71 | # handle missing subrepo spec files as removed |
|
71 | 72 | ui.warn( |
|
72 | 73 | _(b"warning: subrepo spec file \'%s\' not found\n") |
|
73 | 74 | % repo.pathto(f) |
|
74 | 75 | ) |
|
75 | 76 | return |
|
76 | 77 | p.parse(f, data, sections, remap, read) |
|
77 | 78 | else: |
|
78 | 79 | raise error.Abort( |
|
79 | 80 | _(b"subrepo spec file \'%s\' not found") % repo.pathto(f) |
|
80 | 81 | ) |
|
81 | 82 | |
|
82 | 83 | if b'.hgsub' in ctx: |
|
83 | 84 | read(b'.hgsub') |
|
84 | 85 | |
|
85 | 86 | for path, src in ui.configitems(b'subpaths'): |
|
86 | 87 | p.set(b'subpaths', path, src, ui.configsource(b'subpaths', path)) |
|
87 | 88 | |
|
88 | 89 | rev = {} |
|
89 | 90 | if b'.hgsubstate' in ctx: |
|
90 | 91 | try: |
|
91 | 92 | for i, l in enumerate(ctx[b'.hgsubstate'].data().splitlines()): |
|
92 | 93 | l = l.lstrip() |
|
93 | 94 | if not l: |
|
94 | 95 | continue |
|
95 | 96 | try: |
|
96 | 97 | revision, path = l.split(b" ", 1) |
|
97 | 98 | except ValueError: |
|
98 | 99 | raise error.Abort( |
|
99 | 100 | _( |
|
100 | 101 | b"invalid subrepository revision " |
|
101 | 102 | b"specifier in \'%s\' line %d" |
|
102 | 103 | ) |
|
103 | 104 | % (repo.pathto(b'.hgsubstate'), (i + 1)) |
|
104 | 105 | ) |
|
105 | 106 | rev[path] = revision |
|
106 | 107 | except IOError as err: |
|
107 | 108 | if err.errno != errno.ENOENT: |
|
108 | 109 | raise |
|
109 | 110 | |
|
110 | 111 | def remap(src): |
|
111 | 112 | # type: (bytes) -> bytes |
|
112 | 113 | for pattern, repl in p.items(b'subpaths'): |
|
113 | 114 | # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub |
|
114 | 115 | # does a string decode. |
|
115 | 116 | repl = stringutil.escapestr(repl) |
|
116 | 117 | # However, we still want to allow back references to go |
|
117 | 118 | # through unharmed, so we turn r'\\1' into r'\1'. Again, |
|
118 | 119 | # extra escapes are needed because re.sub string decodes. |
|
119 | 120 | repl = re.sub(br'\\\\([0-9]+)', br'\\\1', repl) |
|
120 | 121 | try: |
|
121 | 122 | src = re.sub(pattern, repl, src, 1) |
|
122 | 123 | except re.error as e: |
|
123 | 124 | raise error.Abort( |
|
124 | 125 | _(b"bad subrepository pattern in %s: %s") |
|
125 | 126 | % ( |
|
126 | 127 | p.source(b'subpaths', pattern), |
|
127 | 128 | stringutil.forcebytestr(e), |
|
128 | 129 | ) |
|
129 | 130 | ) |
|
130 | 131 | return src |
|
131 | 132 | |
|
132 | 133 | state = {} |
|
133 | 134 | for path, src in p.items(b''): # type: bytes |
|
134 | 135 | kind = b'hg' |
|
135 | 136 | if src.startswith(b'['): |
|
136 | 137 | if b']' not in src: |
|
137 | 138 | raise error.Abort(_(b'missing ] in subrepository source')) |
|
138 | 139 | kind, src = src.split(b']', 1) |
|
139 | 140 | kind = kind[1:] |
|
140 | 141 | src = src.lstrip() # strip any extra whitespace after ']' |
|
141 | 142 | |
|
142 | 143 | if not urlutil.url(src).isabs(): |
|
143 | 144 | parent = _abssource(repo, abort=False) |
|
144 | 145 | if parent: |
|
145 | 146 | parent = urlutil.url(parent) |
|
146 | 147 | parent.path = posixpath.join(parent.path or b'', src) |
|
147 | 148 | parent.path = posixpath.normpath(parent.path) |
|
148 | 149 | joined = bytes(parent) |
|
149 | 150 | # Remap the full joined path and use it if it changes, |
|
150 | 151 | # else remap the original source. |
|
151 | 152 | remapped = remap(joined) |
|
152 | 153 | if remapped == joined: |
|
153 | 154 | src = remap(src) |
|
154 | 155 | else: |
|
155 | 156 | src = remapped |
|
156 | 157 | |
|
157 | 158 | src = remap(src) |
|
158 | 159 | state[util.pconvert(path)] = (src.strip(), rev.get(path, b''), kind) |
|
159 | 160 | |
|
160 | 161 | return state |
|
161 | 162 | |
|
162 | 163 | |
|
163 | 164 | def writestate(repo, state): |
|
164 | 165 | # type: (localrepo.localrepository, Substate) -> None |
|
165 | 166 | """rewrite .hgsubstate in (outer) repo with these subrepo states""" |
|
166 | 167 | lines = [ |
|
167 | 168 | b'%s %s\n' % (state[s][1], s) |
|
168 | 169 | for s in sorted(state) |
|
169 | 170 | if state[s][1] != nullstate[1] |
|
170 | 171 | ] |
|
171 | 172 | repo.wwrite(b'.hgsubstate', b''.join(lines), b'') |
|
172 | 173 | |
|
173 | 174 | |
|
174 | 175 | def submerge(repo, wctx, mctx, actx, overwrite, labels=None): |
|
175 | 176 | # type: (localrepo.localrepository, context.workingctx, context.changectx, context.changectx, bool, Optional[Any]) -> Substate |
|
176 | 177 | # TODO: type the `labels` arg |
|
177 | 178 | """delegated from merge.applyupdates: merging of .hgsubstate file |
|
178 | 179 | in working context, merging context and ancestor context""" |
|
179 | 180 | if mctx == actx: # backwards? |
|
180 | 181 | actx = wctx.p1() |
|
181 | 182 | s1 = wctx.substate |
|
182 | 183 | s2 = mctx.substate |
|
183 | 184 | sa = actx.substate |
|
184 | 185 | sm = {} |
|
185 | 186 | |
|
187 | s1match = sparse.matcher(repo, revs=[wctx.rev()]) | |
|
188 | s2match = sparse.matcher(repo, revs=[mctx.rev()]) | |
|
189 | ||
|
186 | 190 | repo.ui.debug(b"subrepo merge %s %s %s\n" % (wctx, mctx, actx)) |
|
187 | 191 | |
|
188 | 192 | def debug(s, msg, r=b""): |
|
189 | 193 | if r: |
|
190 | 194 | r = b"%s:%s:%s" % r |
|
191 | 195 | repo.ui.debug(b" subrepo %s: %s %s\n" % (s, msg, r)) |
|
192 | 196 | |
|
193 | 197 | promptssrc = filemerge.partextras(labels) |
|
194 | 198 | for s, l in sorted(pycompat.iteritems(s1)): |
|
199 | if not s1match(s): | |
|
200 | sm[s] = l # ignore changes out of sparse | |
|
201 | continue | |
|
195 | 202 | a = sa.get(s, nullstate) |
|
196 | 203 | ld = l # local state with possible dirty flag for compares |
|
197 | 204 | if wctx.sub(s).dirty(): |
|
198 | 205 | ld = (l[0], l[1] + b"+") |
|
199 | 206 | if wctx == actx: # overwrite |
|
200 | 207 | a = ld |
|
201 | 208 | |
|
202 | 209 | prompts = promptssrc.copy() |
|
203 | 210 | prompts[b's'] = s |
|
204 | 211 | if s in s2: |
|
212 | if not s2match(s): | |
|
213 | sm[s] = l # ignore changes out of sparse | |
|
214 | continue | |
|
205 | 215 | r = s2[s] |
|
206 | 216 | if ld == r or r == a: # no change or local is newer |
|
207 | 217 | sm[s] = l |
|
208 | 218 | continue |
|
209 | 219 | elif ld == a: # other side changed |
|
210 | 220 | debug(s, b"other changed, get", r) |
|
211 | 221 | wctx.sub(s).get(r, overwrite) |
|
212 | 222 | sm[s] = r |
|
213 | 223 | elif ld[0] != r[0]: # sources differ |
|
214 | 224 | prompts[b'lo'] = l[0] |
|
215 | 225 | prompts[b'ro'] = r[0] |
|
216 | 226 | if repo.ui.promptchoice( |
|
217 | 227 | _( |
|
218 | 228 | b' subrepository sources for %(s)s differ\n' |
|
219 | 229 | b'you can use (l)ocal%(l)s source (%(lo)s)' |
|
220 | 230 | b' or (r)emote%(o)s source (%(ro)s).\n' |
|
221 | 231 | b'what do you want to do?' |
|
222 | 232 | b'$$ &Local $$ &Remote' |
|
223 | 233 | ) |
|
224 | 234 | % prompts, |
|
225 | 235 | 0, |
|
226 | 236 | ): |
|
227 | 237 | debug(s, b"prompt changed, get", r) |
|
228 | 238 | wctx.sub(s).get(r, overwrite) |
|
229 | 239 | sm[s] = r |
|
230 | 240 | elif ld[1] == a[1]: # local side is unchanged |
|
231 | 241 | debug(s, b"other side changed, get", r) |
|
232 | 242 | wctx.sub(s).get(r, overwrite) |
|
233 | 243 | sm[s] = r |
|
234 | 244 | else: |
|
235 | 245 | debug(s, b"both sides changed") |
|
236 | 246 | srepo = wctx.sub(s) |
|
237 | 247 | prompts[b'sl'] = srepo.shortid(l[1]) |
|
238 | 248 | prompts[b'sr'] = srepo.shortid(r[1]) |
|
239 | 249 | option = repo.ui.promptchoice( |
|
240 | 250 | _( |
|
241 | 251 | b' subrepository %(s)s diverged (local revision: %(sl)s, ' |
|
242 | 252 | b'remote revision: %(sr)s)\n' |
|
243 | 253 | b'you can (m)erge, keep (l)ocal%(l)s or keep ' |
|
244 | 254 | b'(r)emote%(o)s.\n' |
|
245 | 255 | b'what do you want to do?' |
|
246 | 256 | b'$$ &Merge $$ &Local $$ &Remote' |
|
247 | 257 | ) |
|
248 | 258 | % prompts, |
|
249 | 259 | 0, |
|
250 | 260 | ) |
|
251 | 261 | if option == 0: |
|
252 | 262 | wctx.sub(s).merge(r) |
|
253 | 263 | sm[s] = l |
|
254 | 264 | debug(s, b"merge with", r) |
|
255 | 265 | elif option == 1: |
|
256 | 266 | sm[s] = l |
|
257 | 267 | debug(s, b"keep local subrepo revision", l) |
|
258 | 268 | else: |
|
259 | 269 | wctx.sub(s).get(r, overwrite) |
|
260 | 270 | sm[s] = r |
|
261 | 271 | debug(s, b"get remote subrepo revision", r) |
|
262 | 272 | elif ld == a: # remote removed, local unchanged |
|
263 | 273 | debug(s, b"remote removed, remove") |
|
264 | 274 | wctx.sub(s).remove() |
|
265 | 275 | elif a == nullstate: # not present in remote or ancestor |
|
266 | 276 | debug(s, b"local added, keep") |
|
267 | 277 | sm[s] = l |
|
268 | 278 | continue |
|
269 | 279 | else: |
|
270 | 280 | if repo.ui.promptchoice( |
|
271 | 281 | _( |
|
272 | 282 | b' local%(l)s changed subrepository %(s)s' |
|
273 | 283 | b' which remote%(o)s removed\n' |
|
274 | 284 | b'use (c)hanged version or (d)elete?' |
|
275 | 285 | b'$$ &Changed $$ &Delete' |
|
276 | 286 | ) |
|
277 | 287 | % prompts, |
|
278 | 288 | 0, |
|
279 | 289 | ): |
|
280 | 290 | debug(s, b"prompt remove") |
|
281 | 291 | wctx.sub(s).remove() |
|
282 | 292 | |
|
283 | 293 | for s, r in sorted(s2.items()): |
|
284 | 294 | if s in s1: |
|
285 | 295 | continue |
|
286 | 296 | elif s not in sa: |
|
287 | 297 | debug(s, b"remote added, get", r) |
|
288 | 298 | mctx.sub(s).get(r) |
|
289 | 299 | sm[s] = r |
|
290 | 300 | elif r != sa[s]: |
|
301 | if not s2match(s): | |
|
302 | # ignore changes out of sparse, | |
|
303 | continue | |
|
304 | elif not s1match(s): | |
|
305 | # recreate changes out of sparse, | |
|
306 | # sm[s] = r | |
|
307 | continue | |
|
308 | ||
|
291 | 309 | prompts = promptssrc.copy() |
|
292 | 310 | prompts[b's'] = s |
|
293 | 311 | if ( |
|
294 | 312 | repo.ui.promptchoice( |
|
295 | 313 | _( |
|
296 | 314 | b' remote%(o)s changed subrepository %(s)s' |
|
297 | 315 | b' which local%(l)s removed\n' |
|
298 | 316 | b'use (c)hanged version or (d)elete?' |
|
299 | 317 | b'$$ &Changed $$ &Delete' |
|
300 | 318 | ) |
|
301 | 319 | % prompts, |
|
302 | 320 | 0, |
|
303 | 321 | ) |
|
304 | 322 | == 0 |
|
305 | 323 | ): |
|
306 | 324 | debug(s, b"prompt recreate", r) |
|
307 | 325 | mctx.sub(s).get(r) |
|
308 | 326 | sm[s] = r |
|
309 | 327 | |
|
310 | 328 | # record merged .hgsubstate |
|
311 | 329 | writestate(repo, sm) |
|
312 | 330 | return sm |
|
313 | 331 | |
|
314 | 332 | |
|
def precommit(ui, wctx, status, match, force=False):
    # type: (uimod.ui, context.workingcommitctx, scmutil.status, matchmod.basematcher, bool) -> Tuple[List[bytes], Set[bytes], Substate]
    """Calculate .hgsubstate changes that should be applied before committing

    Returns (subs, commitsubs, newstate) where
    - subs: changed subrepos (including dirty ones)
    - commitsubs: dirty subrepos which the caller needs to commit recursively
    - newstate: new state dict which the caller must write to .hgsubstate

    This also updates the given status argument.
    """
    subs = []
    commitsubs = set()
    newstate = wctx.substate.copy()

    if b'.hgsub' not in wctx:
        # no .hgsub: nothing to manage, but if .hgsub was just removed,
        # a lingering .hgsubstate must be scheduled for removal too
        if b'.hgsub' in status.removed:
            touched = status.modified + status.added + status.removed
            if b'.hgsubstate' in wctx and b'.hgsubstate' not in touched:
                status.removed.insert(0, b'.hgsubstate')
        return subs, commitsubs, newstate

    # .hgsubstate is managed here, not by the regular status machinery
    for bucket in (status.modified, status.added, status.removed):
        if b'.hgsubstate' in bucket:
            bucket.remove(b'.hgsubstate')

    # compare each subrepo's working state against the last committed
    # state, and derive the new substate from the latter
    committed = wctx.p1().substate
    for s in sorted(newstate):
        if not match(s):
            # subrepo falls outside the match: keep the previously
            # committed state when one exists
            if s in committed:
                newstate[s] = committed[s]
                continue
            if not force:
                raise error.Abort(
                    _(b"commit with new subrepo %s excluded") % s
                )
        dirtyreason = wctx.sub(s).dirtyreason(True)
        if dirtyreason:
            # dirty subrepos require an explicit opt-in to commit
            if not ui.configbool(b'ui', b'commitsubrepos'):
                raise error.Abort(
                    dirtyreason,
                    hint=_(b"use --subrepos for recursive commit"),
                )
            subs.append(s)
            commitsubs.add(s)
        else:
            bs = wctx.sub(s).basestate()
            newstate[s] = (newstate[s][0], bs, newstate[s][2])
            if committed.get(s, (None, None, None))[1] != bs:
                subs.append(s)

    # subrepos present in a parent but absent from newstate were removed
    for p in wctx.parents():
        gone = [s for s in p.substate if s not in newstate]
        subs += [s for s in gone if match(s)]

    if subs:
        if not match(b'.hgsub') and b'.hgsub' in (
            wctx.modified() + wctx.added()
        ):
            raise error.Abort(_(b"can't commit subrepos without .hgsub"))
        status.modified.insert(0, b'.hgsubstate')

    return subs, commitsubs, newstate
|
384 | 402 | |
|
385 | 403 | |
|
def repo_rel_or_abs_source(repo):
    """return the source of this repo

    Either absolute or relative the outermost repo"""
    parent = repo
    chunks = []
    # Walk up the subrepo chain collecting each level's source, stopping
    # once an absolute URL is reached (everything below is relative to it).
    # The attribute name is a native str: Python 3's getattr() rejects
    # bytes attribute names, so b'_subparent' only worked via a pycompat
    # getattr wrapper.
    while util.safehasattr(parent, '_subparent'):
        source = urlutil.url(parent._subsource)
        chunks.append(bytes(source))
        if source.isabs():
            break
        parent = parent._subparent

    chunks.reverse()
    # NOTE(review): assumes repo is actually a subrepo; with an empty
    # chunks list posixpath.join(*[]) would raise TypeError — confirm
    # callers guarantee this.
    path = posixpath.join(*chunks)
    return posixpath.normpath(path)
|
402 | 420 | |
|
403 | 421 | |
|
def reporelpath(repo):
    # type: (localrepo.localrepository) -> bytes
    """return path to this (sub)repo as seen from outermost repo"""
    parent = repo
    # climb to the outermost repository; the attribute name is a native
    # str because Python 3's getattr() rejects bytes attribute names
    # (b'_subparent' relied on a pycompat getattr wrapper)
    while util.safehasattr(parent, '_subparent'):
        parent = parent._subparent
    # strip the outermost root (normalized with a trailing separator)
    # from this repo's root to get the relative path
    return repo.root[len(pathutil.normasprefix(parent.root)) :]
|
411 | 429 | |
|
412 | 430 | |
|
def subrelpath(sub):
    # type: (subrepo.abstractsubrepo) -> bytes
    """return path to this subrepo as seen from outermost repo"""
    # the subrepo object carries its own outermost-relative path
    return sub._relpath
|
417 | 435 | |
|
418 | 436 | |
|
def _abssource(repo, push=False, abort=True):
    # type: (localrepo.localrepository, bool, bool) -> Optional[bytes]
    """return pull/push path of repo - either based on parent repo .hgsub info
    or on the top repo config. Abort or return None if no source found.

    Attribute names passed to safehasattr() are native str: Python 3's
    getattr() rejects bytes attribute names, so the former b'_subparent'/
    b'_subtoppath' spellings relied on a pycompat getattr wrapper.
    """
    if util.safehasattr(repo, '_subparent'):
        # subrepo: resolve its (possibly relative) source against the
        # parent repo's own source
        source = urlutil.url(repo._subsource)
        if source.isabs():
            return bytes(source)
        source.path = posixpath.normpath(source.path)
        parent = _abssource(repo._subparent, push, abort=False)
        if parent:
            parent = urlutil.url(util.pconvert(parent))
            parent.path = posixpath.join(parent.path or b'', source.path)
            parent.path = posixpath.normpath(parent.path)
            return bytes(parent)
    else:  # recursion reached top repo
        path = None
        if util.safehasattr(repo, '_subtoppath'):
            path = repo._subtoppath
        elif push and repo.ui.config(b'paths', b'default-push'):
            path = repo.ui.config(b'paths', b'default-push')
        elif repo.ui.config(b'paths', b'default'):
            path = repo.ui.config(b'paths', b'default')
        elif repo.shared():
            # chop off the .hg component to get the default path form. This has
            # already run through vfsmod.vfs(..., realpath=True), so it doesn't
            # have problems with 'C:'
            return os.path.dirname(repo.sharedpath)
        if path:
            # issue5770: 'C:\' and 'C:' are not equivalent paths. The former is
            # as expected: an absolute path to the root of the C: drive. The
            # latter is a relative path, and works like so:
            #
            #   C:\>cd C:\some\path
            #   C:\>D:
            #   D:\>python -c "import os; print os.path.abspath('C:')"
            #   C:\some\path
            #
            #   D:\>python -c "import os; print os.path.abspath('C:relative')"
            #   C:\some\path\relative
            if urlutil.hasdriveletter(path):
                if len(path) == 2 or path[2:3] not in br'\/':
                    path = os.path.abspath(path)
            return path

    if abort:
        raise error.Abort(_(b"default path for subrepository not found"))
|
466 | 484 | |
|
467 | 485 | |
|
def newcommitphase(ui, ctx):
    # type: (uimod.ui, context.changectx) -> int
    """Return the phase a new commit based on ``ctx`` should get,
    honoring the ``phases.checksubrepos`` setting for subrepo phases."""
    commitphase = phases.newcommitphase(ui)
    substate = getattr(ctx, "substate", None)
    if not substate:
        # no subrepos involved: the configured phase stands as-is
        return commitphase

    check = ui.config(b'phases', b'checksubrepos')
    if check not in (b'ignore', b'follow', b'abort'):
        raise error.Abort(
            _(b'invalid phases.checksubrepos configuration: %s') % check
        )
    if check == b'ignore':
        return commitphase

    # locate the least-public phase among all subrepo revisions
    maxphase, maxsub = phases.public, None
    for name in sorted(substate):
        subphase = ctx.sub(name).phase(substate[name][1])
        if subphase > maxphase:
            maxphase, maxsub = subphase, name

    if commitphase >= maxphase:
        return commitphase

    # a subrepo is in a less-public phase than the commit would be
    if check == b'abort':
        raise error.Abort(
            _(
                b"can't commit in %s phase"
                b" conflicting %s from subrepository %s"
            )
            % (
                phases.phasenames[commitphase],
                phases.phasenames[maxphase],
                maxsub,
            )
        )
    # 'follow': adopt the subrepo's phase, with a warning
    ui.warn(
        _(
            b"warning: changes are committed in"
            b" %s phase from subrepository %s\n"
        )
        % (phases.phasenames[maxphase], maxsub)
    )
    return maxphase
General Comments 0
You need to be logged in to leave comments.
Login now