@@ -1,311 +1,315 @@
|
1 | 1 | # config.py - configuration parsing for Mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2009 Matt Mackall <mpm@selenic.com> and others |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | import errno |
|
11 | 11 | import os |
|
12 | 12 | |
|
13 | 13 | from .i18n import _ |
|
14 | 14 | from .pycompat import getattr |
|
15 | 15 | from . import ( |
|
16 | 16 | encoding, |
|
17 | 17 | error, |
|
18 | 18 | pycompat, |
|
19 | 19 | util, |
|
20 | 20 | ) |
|
21 | 21 | |
|
22 | 22 | |
|
23 | 23 | class config(object): |
|
24 | 24 | def __init__(self, data=None, includepaths=None): |
|
25 | 25 | self._data = {} |
|
26 | 26 | self._unset = [] |
|
27 | 27 | self._includepaths = includepaths or [] |
|
28 | 28 | if data: |
|
29 | 29 | for k in data._data: |
|
30 | 30 | self._data[k] = data[k].copy() |
|
31 | 31 | self._source = data._source.copy() |
|
32 | 32 | else: |
|
33 | 33 | self._source = util.cowdict() |
|
34 | 34 | |
|
35 | 35 | def copy(self): |
|
36 | 36 | return config(self) |
|
37 | 37 | |
|
38 | 38 | def __contains__(self, section): |
|
39 | 39 | return section in self._data |
|
40 | 40 | |
|
41 | 41 | def hasitem(self, section, item): |
|
42 | 42 | return item in self._data.get(section, {}) |
|
43 | 43 | |
|
44 | 44 | def __getitem__(self, section): |
|
45 | 45 | return self._data.get(section, {}) |
|
46 | 46 | |
|
47 | 47 | def __iter__(self): |
|
48 | 48 | for d in self.sections(): |
|
49 | 49 | yield d |
|
50 | 50 | |
|
51 | 51 | def update(self, src): |
|
52 | 52 | self._source = self._source.preparewrite() |
|
53 | 53 | for s, n in src._unset: |
|
54 | 54 | ds = self._data.get(s, None) |
|
55 | 55 | if ds is not None and n in ds: |
|
56 | 56 | self._data[s] = ds.preparewrite() |
|
57 | 57 | del self._data[s][n] |
|
58 | 58 | del self._source[(s, n)] |
|
59 | 59 | for s in src: |
|
60 | 60 | ds = self._data.get(s, None) |
|
61 | 61 | if ds: |
|
62 | 62 | self._data[s] = ds.preparewrite() |
|
63 | 63 | else: |
|
64 | 64 | self._data[s] = util.cowsortdict() |
|
65 | 65 | self._data[s].update(src._data[s]) |
|
66 | 66 | self._source.update(src._source) |
|
67 | 67 | |
|
68 | 68 | def get(self, section, item, default=None): |
|
69 | 69 | return self._data.get(section, {}).get(item, default) |
|
70 | 70 | |
|
71 | 71 | def backup(self, section, item): |
|
72 | 72 | """return a tuple allowing restore to reinstall a previous value |
|
73 | 73 | |
|
74 | 74 | The main reason we need it is because it handles the "no data" case. |
|
75 | 75 | """ |
|
76 | 76 | try: |
|
77 | 77 | value = self._data[section][item] |
|
78 | 78 | source = self.source(section, item) |
|
79 | 79 | return (section, item, value, source) |
|
80 | 80 | except KeyError: |
|
81 | 81 | return (section, item) |
|
82 | 82 | |
|
83 | 83 | def source(self, section, item): |
|
84 | 84 | return self._source.get((section, item), b"") |
|
85 | 85 | |
|
86 | 86 | def sections(self): |
|
87 | 87 | return sorted(self._data.keys()) |
|
88 | 88 | |
|
89 | 89 | def items(self, section): |
|
90 | 90 | return list(pycompat.iteritems(self._data.get(section, {}))) |
|
91 | 91 | |
|
92 | 92 | def set(self, section, item, value, source=b""): |
|
93 | 93 | if pycompat.ispy3: |
|
94 | 94 | assert not isinstance( |
|
95 | 95 | section, str |
|
96 | 96 | ), b'config section may not be unicode strings on Python 3' |
|
97 | 97 | assert not isinstance( |
|
98 | 98 | item, str |
|
99 | 99 | ), b'config item may not be unicode strings on Python 3' |
|
100 | 100 | assert not isinstance( |
|
101 | 101 | value, str |
|
102 | 102 | ), b'config values may not be unicode strings on Python 3' |
|
103 | 103 | if section not in self: |
|
104 | 104 | self._data[section] = util.cowsortdict() |
|
105 | 105 | else: |
|
106 | 106 | self._data[section] = self._data[section].preparewrite() |
|
107 | 107 | self._data[section][item] = value |
|
108 | 108 | if source: |
|
109 | 109 | self._source = self._source.preparewrite() |
|
110 | 110 | self._source[(section, item)] = source |
|
111 | 111 | |
|
112 | 112 | def restore(self, data): |
|
113 | 113 | """restore data returned by self.backup""" |
|
114 | 114 | self._source = self._source.preparewrite() |
|
115 | 115 | if len(data) == 4: |
|
116 | 116 | # restore old data |
|
117 | 117 | section, item, value, source = data |
|
118 | 118 | self._data[section] = self._data[section].preparewrite() |
|
119 | 119 | self._data[section][item] = value |
|
120 | 120 | self._source[(section, item)] = source |
|
121 | 121 | else: |
|
122 | 122 | # no data before, remove everything |
|
123 | 123 | section, item = data |
|
124 | 124 | if section in self._data: |
|
125 | 125 | self._data[section].pop(item, None) |
|
126 | 126 | self._source.pop((section, item), None) |
|
127 | 127 | |
|
128 | 128 | def parse(self, src, data, sections=None, remap=None, include=None): |
|
129 | 129 | sectionre = util.re.compile(br'\[([^\[]+)\]') |
|
130 | 130 | itemre = util.re.compile(br'([^=\s][^=]*?)\s*=\s*(.*\S|)') |
|
131 | 131 | contre = util.re.compile(br'\s+(\S|\S.*\S)\s*$') |
|
132 | 132 | emptyre = util.re.compile(br'(;|#|\s*$)') |
|
133 | 133 | commentre = util.re.compile(br'(;|#)') |
|
134 | 134 | unsetre = util.re.compile(br'%unset\s+(\S+)') |
|
135 | 135 | includere = util.re.compile(br'%include\s+(\S|\S.*\S)\s*$') |
|
136 | 136 | section = b"" |
|
137 | 137 | item = None |
|
138 | 138 | line = 0 |
|
139 | 139 | cont = False |
|
140 | 140 | |
|
141 | 141 | if remap: |
|
142 | 142 | section = remap.get(section, section) |
|
143 | 143 | |
|
144 | 144 | for l in data.splitlines(True): |
|
145 | 145 | line += 1 |
|
146 | 146 | if line == 1 and l.startswith(b'\xef\xbb\xbf'): |
|
147 | 147 | # Someone set us up the BOM |
|
148 | 148 | l = l[3:] |
|
149 | 149 | if cont: |
|
150 | 150 | if commentre.match(l): |
|
151 | 151 | continue |
|
152 | 152 | m = contre.match(l) |
|
153 | 153 | if m: |
|
154 | 154 | if sections and section not in sections: |
|
155 | 155 | continue |
|
156 | 156 | v = self.get(section, item) + b"\n" + m.group(1) |
|
157 | 157 | self.set(section, item, v, b"%s:%d" % (src, line)) |
|
158 | 158 | continue |
|
159 | 159 | item = None |
|
160 | 160 | cont = False |
|
161 | 161 | m = includere.match(l) |
|
162 | 162 | |
|
163 | 163 | if m and include: |
|
164 | 164 | expanded = util.expandpath(m.group(1)) |
|
165 | 165 | includepaths = [os.path.dirname(src)] + self._includepaths |
|
166 | 166 | |
|
167 | 167 | for base in includepaths: |
|
168 | 168 | inc = os.path.normpath(os.path.join(base, expanded)) |
|
169 | 169 | |
|
170 | 170 | try: |
|
171 | | include(inc, remap=remap, sections=sections) |

|

| 171 | include(expanded, inc, remap=remap, sections=sections) |
|
172 | 172 | break |
|
173 | 173 | except IOError as inst: |
|
174 | 174 | if inst.errno != errno.ENOENT: |
|
175 | 175 | raise error.ParseError( |
|
176 | 176 | _(b"cannot include %s (%s)") |
|
177 | 177 | % (inc, encoding.strtolocal(inst.strerror)), |
|
178 | 178 | b"%s:%d" % (src, line), |
|
179 | 179 | ) |
|
180 | 180 | continue |
|
181 | 181 | if emptyre.match(l): |
|
182 | 182 | continue |
|
183 | 183 | m = sectionre.match(l) |
|
184 | 184 | if m: |
|
185 | 185 | section = m.group(1) |
|
186 | 186 | if remap: |
|
187 | 187 | section = remap.get(section, section) |
|
188 | 188 | if section not in self: |
|
189 | 189 | self._data[section] = util.cowsortdict() |
|
190 | 190 | continue |
|
191 | 191 | m = itemre.match(l) |
|
192 | 192 | if m: |
|
193 | 193 | item = m.group(1) |
|
194 | 194 | cont = True |
|
195 | 195 | if sections and section not in sections: |
|
196 | 196 | continue |
|
197 | 197 | self.set(section, item, m.group(2), b"%s:%d" % (src, line)) |
|
198 | 198 | continue |
|
199 | 199 | m = unsetre.match(l) |
|
200 | 200 | if m: |
|
201 | 201 | name = m.group(1) |
|
202 | 202 | if sections and section not in sections: |
|
203 | 203 | continue |
|
204 | 204 | if self.get(section, name) is not None: |
|
205 | 205 | self._data[section] = self._data[section].preparewrite() |
|
206 | 206 | del self._data[section][name] |
|
207 | 207 | self._unset.append((section, name)) |
|
208 | 208 | continue |
|
209 | 209 | |
|
210 | 210 | raise error.ParseError(l.rstrip(), (b"%s:%d" % (src, line))) |
|
211 | 211 | |
|
212 | 212 | def read(self, path, fp=None, sections=None, remap=None): |
|
213 | 213 | if not fp: |
|
214 | 214 | fp = util.posixfile(path, b'rb') |
|
215 | 215 | assert getattr(fp, 'mode', 'rb') == 'rb', ( |
|
216 | 216 | b'config files must be opened in binary mode, got fp=%r mode=%r' |
|
217 | 217 | % (fp, fp.mode,) |
|
218 | 218 | ) |
|
| 219 | |

|

| 220 | def include(rel, abs, remap, sections): |

|

| 221 | self.read(abs, remap=remap, sections=sections) |

|

| 222 | |
|
219 | 223 | self.parse( |
|
220 | | path, fp.read(), sections=sections, remap=remap, include= |

|

| 224 | path, fp.read(), sections=sections, remap=remap, include=include |
|
221 | 225 | ) |
|
222 | 226 | |
|
223 | 227 | |
|
224 | 228 | def parselist(value): |
|
225 | 229 | """parse a configuration value as a list of comma/space separated strings |
|
226 | 230 | |
|
227 | 231 | >>> parselist(b'this,is "a small" ,test') |
|
228 | 232 | ['this', 'is', 'a small', 'test'] |
|
229 | 233 | """ |
|
230 | 234 | |
|
231 | 235 | def _parse_plain(parts, s, offset): |
|
232 | 236 | whitespace = False |
|
233 | 237 | while offset < len(s) and ( |
|
234 | 238 | s[offset : offset + 1].isspace() or s[offset : offset + 1] == b',' |
|
235 | 239 | ): |
|
236 | 240 | whitespace = True |
|
237 | 241 | offset += 1 |
|
238 | 242 | if offset >= len(s): |
|
239 | 243 | return None, parts, offset |
|
240 | 244 | if whitespace: |
|
241 | 245 | parts.append(b'') |
|
242 | 246 | if s[offset : offset + 1] == b'"' and not parts[-1]: |
|
243 | 247 | return _parse_quote, parts, offset + 1 |
|
244 | 248 | elif s[offset : offset + 1] == b'"' and parts[-1][-1:] == b'\\': |
|
245 | 249 | parts[-1] = parts[-1][:-1] + s[offset : offset + 1] |
|
246 | 250 | return _parse_plain, parts, offset + 1 |
|
247 | 251 | parts[-1] += s[offset : offset + 1] |
|
248 | 252 | return _parse_plain, parts, offset + 1 |
|
249 | 253 | |
|
250 | 254 | def _parse_quote(parts, s, offset): |
|
251 | 255 | if offset < len(s) and s[offset : offset + 1] == b'"': # "" |
|
252 | 256 | parts.append(b'') |
|
253 | 257 | offset += 1 |
|
254 | 258 | while offset < len(s) and ( |
|
255 | 259 | s[offset : offset + 1].isspace() |
|
256 | 260 | or s[offset : offset + 1] == b',' |
|
257 | 261 | ): |
|
258 | 262 | offset += 1 |
|
259 | 263 | return _parse_plain, parts, offset |
|
260 | 264 | |
|
261 | 265 | while offset < len(s) and s[offset : offset + 1] != b'"': |
|
262 | 266 | if ( |
|
263 | 267 | s[offset : offset + 1] == b'\\' |
|
264 | 268 | and offset + 1 < len(s) |
|
265 | 269 | and s[offset + 1 : offset + 2] == b'"' |
|
266 | 270 | ): |
|
267 | 271 | offset += 1 |
|
268 | 272 | parts[-1] += b'"' |
|
269 | 273 | else: |
|
270 | 274 | parts[-1] += s[offset : offset + 1] |
|
271 | 275 | offset += 1 |
|
272 | 276 | |
|
273 | 277 | if offset >= len(s): |
|
274 | 278 | real_parts = _configlist(parts[-1]) |
|
275 | 279 | if not real_parts: |
|
276 | 280 | parts[-1] = b'"' |
|
277 | 281 | else: |
|
278 | 282 | real_parts[0] = b'"' + real_parts[0] |
|
279 | 283 | parts = parts[:-1] |
|
280 | 284 | parts.extend(real_parts) |
|
281 | 285 | return None, parts, offset |
|
282 | 286 | |
|
283 | 287 | offset += 1 |
|
284 | 288 | while offset < len(s) and s[offset : offset + 1] in [b' ', b',']: |
|
285 | 289 | offset += 1 |
|
286 | 290 | |
|
287 | 291 | if offset < len(s): |
|
288 | 292 | if offset + 1 == len(s) and s[offset : offset + 1] == b'"': |
|
289 | 293 | parts[-1] += b'"' |
|
290 | 294 | offset += 1 |
|
291 | 295 | else: |
|
292 | 296 | parts.append(b'') |
|
293 | 297 | else: |
|
294 | 298 | return None, parts, offset |
|
295 | 299 | |
|
296 | 300 | return _parse_plain, parts, offset |
|
297 | 301 | |
|
298 | 302 | def _configlist(s): |
|
299 | 303 | s = s.rstrip(b' ,') |
|
300 | 304 | if not s: |
|
301 | 305 | return [] |
|
302 | 306 | parser, parts, offset = _parse_plain, [b''], 0 |
|
303 | 307 | while parser: |
|
304 | 308 | parser, parts, offset = parser(parts, s, offset) |
|
305 | 309 | return parts |
|
306 | 310 | |
|
307 | 311 | if value is not None and isinstance(value, bytes): |
|
308 | 312 | result = _configlist(value.lstrip(b' ,\n')) |
|
309 | 313 | else: |
|
310 | 314 | result = value |
|
311 | 315 | return result or [] |
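
The hunk above changes the contract of the include callback used by config.parse(): the callback now receives both the %include argument after util.expandpath() (expanded) and the absolute path resolved against the include search paths (inc), and config.read() adapts by passing a small inner include(rel, abs, remap, sections) helper. The sketch below shows a caller-side callback written against that new contract; it is not part of the patch, it assumes the mercurial package is importable, and the names load_with_trace, record_include, and seen are invented for illustration.

```python
# Sketch only: a parse() caller that records which files %include pulled in,
# using the two-argument include callback introduced in the hunk above.
# load_with_trace, record_include, and seen are hypothetical names; paths and
# data are bytes, as Mercurial's config layer expects.
from mercurial import config as configmod


def load_with_trace(path, data):
    cfg = configmod.config()
    seen = []  # (path as written in %include, resolved absolute path)

    def record_include(rel, abs_, remap=None, sections=None):
        # parse() calls this as include(expanded, inc, remap=..., sections=...)
        seen.append((rel, abs_))
        # delegate to read(), mirroring the inner helper added in this patch
        cfg.read(abs_, remap=remap, sections=sections)

    cfg.parse(path, data, include=record_include)
    return cfg, seen
```

One detail worth noting from the parse() code above: a %include line only takes effect when an include callback is supplied; with include=None it matches none of the other patterns and ends in the ParseError at the bottom of parse(), so wrappers like the one sketched here should pass a callback whenever includes are expected.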
@@ -1,459 +1,459 @@
|
1 | 1 | # subrepoutil.py - sub-repository operations and substate handling |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2009-2010 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | import errno |
|
11 | 11 | import os |
|
12 | 12 | import posixpath |
|
13 | 13 | import re |
|
14 | 14 | |
|
15 | 15 | from .i18n import _ |
|
16 | 16 | from .pycompat import getattr |
|
17 | 17 | from . import ( |
|
18 | 18 | config, |
|
19 | 19 | error, |
|
20 | 20 | filemerge, |
|
21 | 21 | pathutil, |
|
22 | 22 | phases, |
|
23 | 23 | pycompat, |
|
24 | 24 | util, |
|
25 | 25 | ) |
|
26 | 26 | from .utils import stringutil |
|
27 | 27 | |
|
28 | 28 | nullstate = (b'', b'', b'empty') |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | def state(ctx, ui): |
|
32 | 32 | """return a state dict, mapping subrepo paths configured in .hgsub |
|
33 | 33 | to tuple: (source from .hgsub, revision from .hgsubstate, kind |
|
34 | 34 | (key in types dict)) |
|
35 | 35 | """ |
|
36 | 36 | p = config.config() |
|
37 | 37 | repo = ctx.repo() |
|
38 | 38 | |
|
39 | | def read(f, sections=None, remap=None): |

|

| 39 | def read(rel, f, sections=None, remap=None): |
|
40 | 40 | if f in ctx: |
|
41 | 41 | try: |
|
42 | 42 | data = ctx[f].data() |
|
43 | 43 | except IOError as err: |
|
44 | 44 | if err.errno != errno.ENOENT: |
|
45 | 45 | raise |
|
46 | 46 | # handle missing subrepo spec files as removed |
|
47 | 47 | ui.warn( |
|
48 | 48 | _(b"warning: subrepo spec file \'%s\' not found\n") |
|
49 | 49 | % repo.pathto(f) |
|
50 | 50 | ) |
|
51 | 51 | return |
|
52 | 52 | p.parse(f, data, sections, remap, read) |
|
53 | 53 | else: |
|
54 | 54 | raise error.Abort( |
|
55 | 55 | _(b"subrepo spec file \'%s\' not found") % repo.pathto(f) |
|
56 | 56 | ) |
|
57 | 57 | |
|
58 | 58 | if b'.hgsub' in ctx: |
|
59 | | read(b'.hgsub') |

|

| 59 | read(b'.hgsub', b'.hgsub') |
|
60 | 60 | |
|
61 | 61 | for path, src in ui.configitems(b'subpaths'): |
|
62 | 62 | p.set(b'subpaths', path, src, ui.configsource(b'subpaths', path)) |
|
63 | 63 | |
|
64 | 64 | rev = {} |
|
65 | 65 | if b'.hgsubstate' in ctx: |
|
66 | 66 | try: |
|
67 | 67 | for i, l in enumerate(ctx[b'.hgsubstate'].data().splitlines()): |
|
68 | 68 | l = l.lstrip() |
|
69 | 69 | if not l: |
|
70 | 70 | continue |
|
71 | 71 | try: |
|
72 | 72 | revision, path = l.split(b" ", 1) |
|
73 | 73 | except ValueError: |
|
74 | 74 | raise error.Abort( |
|
75 | 75 | _( |
|
76 | 76 | b"invalid subrepository revision " |
|
77 | 77 | b"specifier in \'%s\' line %d" |
|
78 | 78 | ) |
|
79 | 79 | % (repo.pathto(b'.hgsubstate'), (i + 1)) |
|
80 | 80 | ) |
|
81 | 81 | rev[path] = revision |
|
82 | 82 | except IOError as err: |
|
83 | 83 | if err.errno != errno.ENOENT: |
|
84 | 84 | raise |
|
85 | 85 | |
|
86 | 86 | def remap(src): |
|
87 | 87 | for pattern, repl in p.items(b'subpaths'): |
|
88 | 88 | # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub |
|
89 | 89 | # does a string decode. |
|
90 | 90 | repl = stringutil.escapestr(repl) |
|
91 | 91 | # However, we still want to allow back references to go |
|
92 | 92 | # through unharmed, so we turn r'\\1' into r'\1'. Again, |
|
93 | 93 | # extra escapes are needed because re.sub string decodes. |
|
94 | 94 | repl = re.sub(br'\\\\([0-9]+)', br'\\\1', repl) |
|
95 | 95 | try: |
|
96 | 96 | src = re.sub(pattern, repl, src, 1) |
|
97 | 97 | except re.error as e: |
|
98 | 98 | raise error.Abort( |
|
99 | 99 | _(b"bad subrepository pattern in %s: %s") |
|
100 | 100 | % ( |
|
101 | 101 | p.source(b'subpaths', pattern), |
|
102 | 102 | stringutil.forcebytestr(e), |
|
103 | 103 | ) |
|
104 | 104 | ) |
|
105 | 105 | return src |
|
106 | 106 | |
|
107 | 107 | state = {} |
|
108 | 108 | for path, src in p[b''].items(): |
|
109 | 109 | kind = b'hg' |
|
110 | 110 | if src.startswith(b'['): |
|
111 | 111 | if b']' not in src: |
|
112 | 112 | raise error.Abort(_(b'missing ] in subrepository source')) |
|
113 | 113 | kind, src = src.split(b']', 1) |
|
114 | 114 | kind = kind[1:] |
|
115 | 115 | src = src.lstrip() # strip any extra whitespace after ']' |
|
116 | 116 | |
|
117 | 117 | if not util.url(src).isabs(): |
|
118 | 118 | parent = _abssource(repo, abort=False) |
|
119 | 119 | if parent: |
|
120 | 120 | parent = util.url(parent) |
|
121 | 121 | parent.path = posixpath.join(parent.path or b'', src) |
|
122 | 122 | parent.path = posixpath.normpath(parent.path) |
|
123 | 123 | joined = bytes(parent) |
|
124 | 124 | # Remap the full joined path and use it if it changes, |
|
125 | 125 | # else remap the original source. |
|
126 | 126 | remapped = remap(joined) |
|
127 | 127 | if remapped == joined: |
|
128 | 128 | src = remap(src) |
|
129 | 129 | else: |
|
130 | 130 | src = remapped |
|
131 | 131 | |
|
132 | 132 | src = remap(src) |
|
133 | 133 | state[util.pconvert(path)] = (src.strip(), rev.get(path, b''), kind) |
|
134 | 134 | |
|
135 | 135 | return state |
|
136 | 136 | |
|
137 | 137 | |
|
138 | 138 | def writestate(repo, state): |
|
139 | 139 | """rewrite .hgsubstate in (outer) repo with these subrepo states""" |
|
140 | 140 | lines = [ |
|
141 | 141 | b'%s %s\n' % (state[s][1], s) |
|
142 | 142 | for s in sorted(state) |
|
143 | 143 | if state[s][1] != nullstate[1] |
|
144 | 144 | ] |
|
145 | 145 | repo.wwrite(b'.hgsubstate', b''.join(lines), b'') |
|
146 | 146 | |
|
147 | 147 | |
|
148 | 148 | def submerge(repo, wctx, mctx, actx, overwrite, labels=None): |
|
149 | 149 | """delegated from merge.applyupdates: merging of .hgsubstate file |
|
150 | 150 | in working context, merging context and ancestor context""" |
|
151 | 151 | if mctx == actx: # backwards? |
|
152 | 152 | actx = wctx.p1() |
|
153 | 153 | s1 = wctx.substate |
|
154 | 154 | s2 = mctx.substate |
|
155 | 155 | sa = actx.substate |
|
156 | 156 | sm = {} |
|
157 | 157 | |
|
158 | 158 | repo.ui.debug(b"subrepo merge %s %s %s\n" % (wctx, mctx, actx)) |
|
159 | 159 | |
|
160 | 160 | def debug(s, msg, r=b""): |
|
161 | 161 | if r: |
|
162 | 162 | r = b"%s:%s:%s" % r |
|
163 | 163 | repo.ui.debug(b" subrepo %s: %s %s\n" % (s, msg, r)) |
|
164 | 164 | |
|
165 | 165 | promptssrc = filemerge.partextras(labels) |
|
166 | 166 | for s, l in sorted(pycompat.iteritems(s1)): |
|
167 | 167 | a = sa.get(s, nullstate) |
|
168 | 168 | ld = l # local state with possible dirty flag for compares |
|
169 | 169 | if wctx.sub(s).dirty(): |
|
170 | 170 | ld = (l[0], l[1] + b"+") |
|
171 | 171 | if wctx == actx: # overwrite |
|
172 | 172 | a = ld |
|
173 | 173 | |
|
174 | 174 | prompts = promptssrc.copy() |
|
175 | 175 | prompts[b's'] = s |
|
176 | 176 | if s in s2: |
|
177 | 177 | r = s2[s] |
|
178 | 178 | if ld == r or r == a: # no change or local is newer |
|
179 | 179 | sm[s] = l |
|
180 | 180 | continue |
|
181 | 181 | elif ld == a: # other side changed |
|
182 | 182 | debug(s, b"other changed, get", r) |
|
183 | 183 | wctx.sub(s).get(r, overwrite) |
|
184 | 184 | sm[s] = r |
|
185 | 185 | elif ld[0] != r[0]: # sources differ |
|
186 | 186 | prompts[b'lo'] = l[0] |
|
187 | 187 | prompts[b'ro'] = r[0] |
|
188 | 188 | if repo.ui.promptchoice( |
|
189 | 189 | _( |
|
190 | 190 | b' subrepository sources for %(s)s differ\n' |
|
191 | 191 | b'you can use (l)ocal%(l)s source (%(lo)s)' |
|
192 | 192 | b' or (r)emote%(o)s source (%(ro)s).\n' |
|
193 | 193 | b'what do you want to do?' |
|
194 | 194 | b'$$ &Local $$ &Remote' |
|
195 | 195 | ) |
|
196 | 196 | % prompts, |
|
197 | 197 | 0, |
|
198 | 198 | ): |
|
199 | 199 | debug(s, b"prompt changed, get", r) |
|
200 | 200 | wctx.sub(s).get(r, overwrite) |
|
201 | 201 | sm[s] = r |
|
202 | 202 | elif ld[1] == a[1]: # local side is unchanged |
|
203 | 203 | debug(s, b"other side changed, get", r) |
|
204 | 204 | wctx.sub(s).get(r, overwrite) |
|
205 | 205 | sm[s] = r |
|
206 | 206 | else: |
|
207 | 207 | debug(s, b"both sides changed") |
|
208 | 208 | srepo = wctx.sub(s) |
|
209 | 209 | prompts[b'sl'] = srepo.shortid(l[1]) |
|
210 | 210 | prompts[b'sr'] = srepo.shortid(r[1]) |
|
211 | 211 | option = repo.ui.promptchoice( |
|
212 | 212 | _( |
|
213 | 213 | b' subrepository %(s)s diverged (local revision: %(sl)s, ' |
|
214 | 214 | b'remote revision: %(sr)s)\n' |
|
215 | 215 | b'you can (m)erge, keep (l)ocal%(l)s or keep ' |
|
216 | 216 | b'(r)emote%(o)s.\n' |
|
217 | 217 | b'what do you want to do?' |
|
218 | 218 | b'$$ &Merge $$ &Local $$ &Remote' |
|
219 | 219 | ) |
|
220 | 220 | % prompts, |
|
221 | 221 | 0, |
|
222 | 222 | ) |
|
223 | 223 | if option == 0: |
|
224 | 224 | wctx.sub(s).merge(r) |
|
225 | 225 | sm[s] = l |
|
226 | 226 | debug(s, b"merge with", r) |
|
227 | 227 | elif option == 1: |
|
228 | 228 | sm[s] = l |
|
229 | 229 | debug(s, b"keep local subrepo revision", l) |
|
230 | 230 | else: |
|
231 | 231 | wctx.sub(s).get(r, overwrite) |
|
232 | 232 | sm[s] = r |
|
233 | 233 | debug(s, b"get remote subrepo revision", r) |
|
234 | 234 | elif ld == a: # remote removed, local unchanged |
|
235 | 235 | debug(s, b"remote removed, remove") |
|
236 | 236 | wctx.sub(s).remove() |
|
237 | 237 | elif a == nullstate: # not present in remote or ancestor |
|
238 | 238 | debug(s, b"local added, keep") |
|
239 | 239 | sm[s] = l |
|
240 | 240 | continue |
|
241 | 241 | else: |
|
242 | 242 | if repo.ui.promptchoice( |
|
243 | 243 | _( |
|
244 | 244 | b' local%(l)s changed subrepository %(s)s' |
|
245 | 245 | b' which remote%(o)s removed\n' |
|
246 | 246 | b'use (c)hanged version or (d)elete?' |
|
247 | 247 | b'$$ &Changed $$ &Delete' |
|
248 | 248 | ) |
|
249 | 249 | % prompts, |
|
250 | 250 | 0, |
|
251 | 251 | ): |
|
252 | 252 | debug(s, b"prompt remove") |
|
253 | 253 | wctx.sub(s).remove() |
|
254 | 254 | |
|
255 | 255 | for s, r in sorted(s2.items()): |
|
256 | 256 | if s in s1: |
|
257 | 257 | continue |
|
258 | 258 | elif s not in sa: |
|
259 | 259 | debug(s, b"remote added, get", r) |
|
260 | 260 | mctx.sub(s).get(r) |
|
261 | 261 | sm[s] = r |
|
262 | 262 | elif r != sa[s]: |
|
263 | 263 | prompts = promptssrc.copy() |
|
264 | 264 | prompts[b's'] = s |
|
265 | 265 | if ( |
|
266 | 266 | repo.ui.promptchoice( |
|
267 | 267 | _( |
|
268 | 268 | b' remote%(o)s changed subrepository %(s)s' |
|
269 | 269 | b' which local%(l)s removed\n' |
|
270 | 270 | b'use (c)hanged version or (d)elete?' |
|
271 | 271 | b'$$ &Changed $$ &Delete' |
|
272 | 272 | ) |
|
273 | 273 | % prompts, |
|
274 | 274 | 0, |
|
275 | 275 | ) |
|
276 | 276 | == 0 |
|
277 | 277 | ): |
|
278 | 278 | debug(s, b"prompt recreate", r) |
|
279 | 279 | mctx.sub(s).get(r) |
|
280 | 280 | sm[s] = r |
|
281 | 281 | |
|
282 | 282 | # record merged .hgsubstate |
|
283 | 283 | writestate(repo, sm) |
|
284 | 284 | return sm |
|
285 | 285 | |
|
286 | 286 | |
|
287 | 287 | def precommit(ui, wctx, status, match, force=False): |
|
288 | 288 | """Calculate .hgsubstate changes that should be applied before committing |
|
289 | 289 | |
|
290 | 290 | Returns (subs, commitsubs, newstate) where |
|
291 | 291 | - subs: changed subrepos (including dirty ones) |
|
292 | 292 | - commitsubs: dirty subrepos which the caller needs to commit recursively |
|
293 | 293 | - newstate: new state dict which the caller must write to .hgsubstate |
|
294 | 294 | |
|
295 | 295 | This also updates the given status argument. |
|
296 | 296 | """ |
|
297 | 297 | subs = [] |
|
298 | 298 | commitsubs = set() |
|
299 | 299 | newstate = wctx.substate.copy() |
|
300 | 300 | |
|
301 | 301 | # only manage subrepos and .hgsubstate if .hgsub is present |
|
302 | 302 | if b'.hgsub' in wctx: |
|
303 | 303 | # we'll decide whether to track this ourselves, thanks |
|
304 | 304 | for c in status.modified, status.added, status.removed: |
|
305 | 305 | if b'.hgsubstate' in c: |
|
306 | 306 | c.remove(b'.hgsubstate') |
|
307 | 307 | |
|
308 | 308 | # compare current state to last committed state |
|
309 | 309 | # build new substate based on last committed state |
|
310 | 310 | oldstate = wctx.p1().substate |
|
311 | 311 | for s in sorted(newstate.keys()): |
|
312 | 312 | if not match(s): |
|
313 | 313 | # ignore working copy, use old state if present |
|
314 | 314 | if s in oldstate: |
|
315 | 315 | newstate[s] = oldstate[s] |
|
316 | 316 | continue |
|
317 | 317 | if not force: |
|
318 | 318 | raise error.Abort( |
|
319 | 319 | _(b"commit with new subrepo %s excluded") % s |
|
320 | 320 | ) |
|
321 | 321 | dirtyreason = wctx.sub(s).dirtyreason(True) |
|
322 | 322 | if dirtyreason: |
|
323 | 323 | if not ui.configbool(b'ui', b'commitsubrepos'): |
|
324 | 324 | raise error.Abort( |
|
325 | 325 | dirtyreason, |
|
326 | 326 | hint=_(b"use --subrepos for recursive commit"), |
|
327 | 327 | ) |
|
328 | 328 | subs.append(s) |
|
329 | 329 | commitsubs.add(s) |
|
330 | 330 | else: |
|
331 | 331 | bs = wctx.sub(s).basestate() |
|
332 | 332 | newstate[s] = (newstate[s][0], bs, newstate[s][2]) |
|
333 | 333 | if oldstate.get(s, (None, None, None))[1] != bs: |
|
334 | 334 | subs.append(s) |
|
335 | 335 | |
|
336 | 336 | # check for removed subrepos |
|
337 | 337 | for p in wctx.parents(): |
|
338 | 338 | r = [s for s in p.substate if s not in newstate] |
|
339 | 339 | subs += [s for s in r if match(s)] |
|
340 | 340 | if subs: |
|
341 | 341 | if not match(b'.hgsub') and b'.hgsub' in ( |
|
342 | 342 | wctx.modified() + wctx.added() |
|
343 | 343 | ): |
|
344 | 344 | raise error.Abort(_(b"can't commit subrepos without .hgsub")) |
|
345 | 345 | status.modified.insert(0, b'.hgsubstate') |
|
346 | 346 | |
|
347 | 347 | elif b'.hgsub' in status.removed: |
|
348 | 348 | # clean up .hgsubstate when .hgsub is removed |
|
349 | 349 | if b'.hgsubstate' in wctx and b'.hgsubstate' not in ( |
|
350 | 350 | status.modified + status.added + status.removed |
|
351 | 351 | ): |
|
352 | 352 | status.removed.insert(0, b'.hgsubstate') |
|
353 | 353 | |
|
354 | 354 | return subs, commitsubs, newstate |
|
355 | 355 | |
|
356 | 356 | |
|
357 | 357 | def reporelpath(repo): |
|
358 | 358 | """return path to this (sub)repo as seen from outermost repo""" |
|
359 | 359 | parent = repo |
|
360 | 360 | while util.safehasattr(parent, b'_subparent'): |
|
361 | 361 | parent = parent._subparent |
|
362 | 362 | return repo.root[len(pathutil.normasprefix(parent.root)) :] |
|
363 | 363 | |
|
364 | 364 | |
|
365 | 365 | def subrelpath(sub): |
|
366 | 366 | """return path to this subrepo as seen from outermost repo""" |
|
367 | 367 | return sub._relpath |
|
368 | 368 | |
|
369 | 369 | |
|
370 | 370 | def _abssource(repo, push=False, abort=True): |
|
371 | 371 | """return pull/push path of repo - either based on parent repo .hgsub info |
|
372 | 372 | or on the top repo config. Abort or return None if no source found.""" |
|
373 | 373 | if util.safehasattr(repo, b'_subparent'): |
|
374 | 374 | source = util.url(repo._subsource) |
|
375 | 375 | if source.isabs(): |
|
376 | 376 | return bytes(source) |
|
377 | 377 | source.path = posixpath.normpath(source.path) |
|
378 | 378 | parent = _abssource(repo._subparent, push, abort=False) |
|
379 | 379 | if parent: |
|
380 | 380 | parent = util.url(util.pconvert(parent)) |
|
381 | 381 | parent.path = posixpath.join(parent.path or b'', source.path) |
|
382 | 382 | parent.path = posixpath.normpath(parent.path) |
|
383 | 383 | return bytes(parent) |
|
384 | 384 | else: # recursion reached top repo |
|
385 | 385 | path = None |
|
386 | 386 | if util.safehasattr(repo, b'_subtoppath'): |
|
387 | 387 | path = repo._subtoppath |
|
388 | 388 | elif push and repo.ui.config(b'paths', b'default-push'): |
|
389 | 389 | path = repo.ui.config(b'paths', b'default-push') |
|
390 | 390 | elif repo.ui.config(b'paths', b'default'): |
|
391 | 391 | path = repo.ui.config(b'paths', b'default') |
|
392 | 392 | elif repo.shared(): |
|
393 | 393 | # chop off the .hg component to get the default path form. This has |
|
394 | 394 | # already run through vfsmod.vfs(..., realpath=True), so it doesn't |
|
395 | 395 | # have problems with 'C:' |
|
396 | 396 | return os.path.dirname(repo.sharedpath) |
|
397 | 397 | if path: |
|
398 | 398 | # issue5770: 'C:\' and 'C:' are not equivalent paths. The former is |
|
399 | 399 | # as expected: an absolute path to the root of the C: drive. The |
|
400 | 400 | # latter is a relative path, and works like so: |
|
401 | 401 | # |
|
402 | 402 | # C:\>cd C:\some\path |
|
403 | 403 | # C:\>D: |
|
404 | 404 | # D:\>python -c "import os; print os.path.abspath('C:')" |
|
405 | 405 | # C:\some\path |
|
406 | 406 | # |
|
407 | 407 | # D:\>python -c "import os; print os.path.abspath('C:relative')" |
|
408 | 408 | # C:\some\path\relative |
|
409 | 409 | if util.hasdriveletter(path): |
|
410 | 410 | if len(path) == 2 or path[2:3] not in br'\/': |
|
411 | 411 | path = os.path.abspath(path) |
|
412 | 412 | return path |
|
413 | 413 | |
|
414 | 414 | if abort: |
|
415 | 415 | raise error.Abort(_(b"default path for subrepository not found")) |
|
416 | 416 | |
|
417 | 417 | |
|
418 | 418 | def newcommitphase(ui, ctx): |
|
419 | 419 | commitphase = phases.newcommitphase(ui) |
|
420 | 420 | substate = getattr(ctx, "substate", None) |
|
421 | 421 | if not substate: |
|
422 | 422 | return commitphase |
|
423 | 423 | check = ui.config(b'phases', b'checksubrepos') |
|
424 | 424 | if check not in (b'ignore', b'follow', b'abort'): |
|
425 | 425 | raise error.Abort( |
|
426 | 426 | _(b'invalid phases.checksubrepos configuration: %s') % check |
|
427 | 427 | ) |
|
428 | 428 | if check == b'ignore': |
|
429 | 429 | return commitphase |
|
430 | 430 | maxphase = phases.public |
|
431 | 431 | maxsub = None |
|
432 | 432 | for s in sorted(substate): |
|
433 | 433 | sub = ctx.sub(s) |
|
434 | 434 | subphase = sub.phase(substate[s][1]) |
|
435 | 435 | if maxphase < subphase: |
|
436 | 436 | maxphase = subphase |
|
437 | 437 | maxsub = s |
|
438 | 438 | if commitphase < maxphase: |
|
439 | 439 | if check == b'abort': |
|
440 | 440 | raise error.Abort( |
|
441 | 441 | _( |
|
442 | 442 | b"can't commit in %s phase" |
|
443 | 443 | b" conflicting %s from subrepository %s" |
|
444 | 444 | ) |
|
445 | 445 | % ( |
|
446 | 446 | phases.phasenames[commitphase], |
|
447 | 447 | phases.phasenames[maxphase], |
|
448 | 448 | maxsub, |
|
449 | 449 | ) |
|
450 | 450 | ) |
|
451 | 451 | ui.warn( |
|
452 | 452 | _( |
|
453 | 453 | b"warning: changes are committed in" |
|
454 | 454 | b" %s phase from subrepository %s\n" |
|
455 | 455 | ) |
|
456 | 456 | % (phases.phasenames[maxphase], maxsub) |
|
457 | 457 | ) |
|
458 | 458 | return maxphase |
|
459 | 459 | return commitphase |
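
On the subrepoutil side the change is purely mechanical: the local read() helper in state() grows a leading rel parameter to match the new callback signature, and the initial call becomes read(b'.hgsub', b'.hgsub') because the spec file is read by its in-repo name. For readers less familiar with .hgsub syntax, the standalone sketch below distills the "[kind]source" splitting rule that state() applies when building its (source, revision, kind) tuples; it is not Mercurial code, ValueError stands in for error.Abort, and the example sources are made up.

```python
# Standalone sketch of the "[kind]source" rule applied in state() above:
# a source may carry an optional "[kind]" prefix and defaults to 'hg'.
# Not Mercurial code; ValueError stands in for error.Abort, sources are made up.
def split_kind(src):
    kind = b'hg'
    if src.startswith(b'['):
        if b']' not in src:
            raise ValueError('missing ] in subrepository source')
        kind, src = src.split(b']', 1)
        kind = kind[1:]
        src = src.lstrip()  # drop any whitespace after ']'
    return kind, src


assert split_kind(b'../sibling-hg-repo') == (b'hg', b'../sibling-hg-repo')
assert split_kind(b'[git]https://example.com/repo.git') == (
    b'git',
    b'https://example.com/repo.git',
)
```

With that default, a plain path or URL in .hgsub always refers to a Mercurial subrepository, as the state() code above shows.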