Show More
@@ -1,314 +1,311 b'' | |||||
1 | # config.py - configuration parsing for Mercurial |
|
1 | # config.py - configuration parsing for Mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2009 Matt Mackall <mpm@selenic.com> and others |
|
3 | # Copyright 2009 Matt Mackall <mpm@selenic.com> and others | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import errno |
|
10 | import errno | |
11 | import os |
|
11 | import os | |
12 |
|
12 | |||
13 | from .i18n import _ |
|
13 | from .i18n import _ | |
14 | from .pycompat import getattr |
|
14 | from .pycompat import getattr | |
15 | from . import ( |
|
15 | from . import ( | |
16 | encoding, |
|
16 | encoding, | |
17 | error, |
|
17 | error, | |
18 | pycompat, |
|
18 | pycompat, | |
19 | util, |
|
19 | util, | |
20 | ) |
|
20 | ) | |
21 |
|
21 | |||
22 |
|
22 | |||
23 | class config(object): |
|
23 | class config(object): | |
24 | def __init__(self, data=None): |
|
24 | def __init__(self, data=None): | |
25 | self._data = {} |
|
25 | self._data = {} | |
26 | self._unset = [] |
|
26 | self._unset = [] | |
27 | if data: |
|
27 | if data: | |
28 | for k in data._data: |
|
28 | for k in data._data: | |
29 | self._data[k] = data[k].copy() |
|
29 | self._data[k] = data[k].copy() | |
30 | self._source = data._source.copy() |
|
30 | self._source = data._source.copy() | |
31 | else: |
|
31 | else: | |
32 | self._source = util.cowdict() |
|
32 | self._source = util.cowdict() | |
33 |
|
33 | |||
34 | def copy(self): |
|
34 | def copy(self): | |
35 | return config(self) |
|
35 | return config(self) | |
36 |
|
36 | |||
37 | def __contains__(self, section): |
|
37 | def __contains__(self, section): | |
38 | return section in self._data |
|
38 | return section in self._data | |
39 |
|
39 | |||
40 | def hasitem(self, section, item): |
|
40 | def hasitem(self, section, item): | |
41 | return item in self._data.get(section, {}) |
|
41 | return item in self._data.get(section, {}) | |
42 |
|
42 | |||
43 | def __getitem__(self, section): |
|
43 | def __getitem__(self, section): | |
44 | return self._data.get(section, {}) |
|
44 | return self._data.get(section, {}) | |
45 |
|
45 | |||
46 | def __iter__(self): |
|
46 | def __iter__(self): | |
47 | for d in self.sections(): |
|
47 | for d in self.sections(): | |
48 | yield d |
|
48 | yield d | |
49 |
|
49 | |||
50 | def update(self, src): |
|
50 | def update(self, src): | |
51 | self._source = self._source.preparewrite() |
|
51 | self._source = self._source.preparewrite() | |
52 | for s, n in src._unset: |
|
52 | for s, n in src._unset: | |
53 | ds = self._data.get(s, None) |
|
53 | ds = self._data.get(s, None) | |
54 | if ds is not None and n in ds: |
|
54 | if ds is not None and n in ds: | |
55 | self._data[s] = ds.preparewrite() |
|
55 | self._data[s] = ds.preparewrite() | |
56 | del self._data[s][n] |
|
56 | del self._data[s][n] | |
57 | del self._source[(s, n)] |
|
57 | del self._source[(s, n)] | |
58 | for s in src: |
|
58 | for s in src: | |
59 | ds = self._data.get(s, None) |
|
59 | ds = self._data.get(s, None) | |
60 | if ds: |
|
60 | if ds: | |
61 | self._data[s] = ds.preparewrite() |
|
61 | self._data[s] = ds.preparewrite() | |
62 | else: |
|
62 | else: | |
63 | self._data[s] = util.cowsortdict() |
|
63 | self._data[s] = util.cowsortdict() | |
64 | self._data[s].update(src._data[s]) |
|
64 | self._data[s].update(src._data[s]) | |
65 | self._source.update(src._source) |
|
65 | self._source.update(src._source) | |
66 |
|
66 | |||
67 | def get(self, section, item, default=None): |
|
67 | def get(self, section, item, default=None): | |
68 | return self._data.get(section, {}).get(item, default) |
|
68 | return self._data.get(section, {}).get(item, default) | |
69 |
|
69 | |||
70 | def backup(self, section, item): |
|
70 | def backup(self, section, item): | |
71 | """return a tuple allowing restore to reinstall a previous value |
|
71 | """return a tuple allowing restore to reinstall a previous value | |
72 |
|
72 | |||
73 | The main reason we need it is because it handles the "no data" case. |
|
73 | The main reason we need it is because it handles the "no data" case. | |
74 | """ |
|
74 | """ | |
75 | try: |
|
75 | try: | |
76 | value = self._data[section][item] |
|
76 | value = self._data[section][item] | |
77 | source = self.source(section, item) |
|
77 | source = self.source(section, item) | |
78 | return (section, item, value, source) |
|
78 | return (section, item, value, source) | |
79 | except KeyError: |
|
79 | except KeyError: | |
80 | return (section, item) |
|
80 | return (section, item) | |
81 |
|
81 | |||
82 | def source(self, section, item): |
|
82 | def source(self, section, item): | |
83 | return self._source.get((section, item), b"") |
|
83 | return self._source.get((section, item), b"") | |
84 |
|
84 | |||
85 | def sections(self): |
|
85 | def sections(self): | |
86 | return sorted(self._data.keys()) |
|
86 | return sorted(self._data.keys()) | |
87 |
|
87 | |||
88 | def items(self, section): |
|
88 | def items(self, section): | |
89 | return list(pycompat.iteritems(self._data.get(section, {}))) |
|
89 | return list(pycompat.iteritems(self._data.get(section, {}))) | |
90 |
|
90 | |||
91 | def set(self, section, item, value, source=b""): |
|
91 | def set(self, section, item, value, source=b""): | |
92 | if pycompat.ispy3: |
|
92 | if pycompat.ispy3: | |
93 | assert not isinstance( |
|
93 | assert not isinstance( | |
94 | section, str |
|
94 | section, str | |
95 | ), b'config section may not be unicode strings on Python 3' |
|
95 | ), b'config section may not be unicode strings on Python 3' | |
96 | assert not isinstance( |
|
96 | assert not isinstance( | |
97 | item, str |
|
97 | item, str | |
98 | ), b'config item may not be unicode strings on Python 3' |
|
98 | ), b'config item may not be unicode strings on Python 3' | |
99 | assert not isinstance( |
|
99 | assert not isinstance( | |
100 | value, str |
|
100 | value, str | |
101 | ), b'config values may not be unicode strings on Python 3' |
|
101 | ), b'config values may not be unicode strings on Python 3' | |
102 | if section not in self: |
|
102 | if section not in self: | |
103 | self._data[section] = util.cowsortdict() |
|
103 | self._data[section] = util.cowsortdict() | |
104 | else: |
|
104 | else: | |
105 | self._data[section] = self._data[section].preparewrite() |
|
105 | self._data[section] = self._data[section].preparewrite() | |
106 | self._data[section][item] = value |
|
106 | self._data[section][item] = value | |
107 | if source: |
|
107 | if source: | |
108 | self._source = self._source.preparewrite() |
|
108 | self._source = self._source.preparewrite() | |
109 | self._source[(section, item)] = source |
|
109 | self._source[(section, item)] = source | |
110 |
|
110 | |||
111 | def restore(self, data): |
|
111 | def restore(self, data): | |
112 | """restore data returned by self.backup""" |
|
112 | """restore data returned by self.backup""" | |
113 | self._source = self._source.preparewrite() |
|
113 | self._source = self._source.preparewrite() | |
114 | if len(data) == 4: |
|
114 | if len(data) == 4: | |
115 | # restore old data |
|
115 | # restore old data | |
116 | section, item, value, source = data |
|
116 | section, item, value, source = data | |
117 | self._data[section] = self._data[section].preparewrite() |
|
117 | self._data[section] = self._data[section].preparewrite() | |
118 | self._data[section][item] = value |
|
118 | self._data[section][item] = value | |
119 | self._source[(section, item)] = source |
|
119 | self._source[(section, item)] = source | |
120 | else: |
|
120 | else: | |
121 | # no data before, remove everything |
|
121 | # no data before, remove everything | |
122 | section, item = data |
|
122 | section, item = data | |
123 | if section in self._data: |
|
123 | if section in self._data: | |
124 | self._data[section].pop(item, None) |
|
124 | self._data[section].pop(item, None) | |
125 | self._source.pop((section, item), None) |
|
125 | self._source.pop((section, item), None) | |
126 |
|
126 | |||
127 | def parse(self, src, data, sections=None, remap=None, include=None): |
|
127 | def parse(self, src, data, sections=None, remap=None, include=None): | |
128 | sectionre = util.re.compile(br'\[([^\[]+)\]') |
|
128 | sectionre = util.re.compile(br'\[([^\[]+)\]') | |
129 | itemre = util.re.compile(br'([^=\s][^=]*?)\s*=\s*(.*\S|)') |
|
129 | itemre = util.re.compile(br'([^=\s][^=]*?)\s*=\s*(.*\S|)') | |
130 | contre = util.re.compile(br'\s+(\S|\S.*\S)\s*$') |
|
130 | contre = util.re.compile(br'\s+(\S|\S.*\S)\s*$') | |
131 | emptyre = util.re.compile(br'(;|#|\s*$)') |
|
131 | emptyre = util.re.compile(br'(;|#|\s*$)') | |
132 | commentre = util.re.compile(br'(;|#)') |
|
132 | commentre = util.re.compile(br'(;|#)') | |
133 | unsetre = util.re.compile(br'%unset\s+(\S+)') |
|
133 | unsetre = util.re.compile(br'%unset\s+(\S+)') | |
134 | includere = util.re.compile(br'%include\s+(\S|\S.*\S)\s*$') |
|
134 | includere = util.re.compile(br'%include\s+(\S|\S.*\S)\s*$') | |
135 | section = b"" |
|
135 | section = b"" | |
136 | item = None |
|
136 | item = None | |
137 | line = 0 |
|
137 | line = 0 | |
138 | cont = False |
|
138 | cont = False | |
139 |
|
139 | |||
140 | if remap: |
|
140 | if remap: | |
141 | section = remap.get(section, section) |
|
141 | section = remap.get(section, section) | |
142 |
|
142 | |||
143 | for l in data.splitlines(True): |
|
143 | for l in data.splitlines(True): | |
144 | line += 1 |
|
144 | line += 1 | |
145 | if line == 1 and l.startswith(b'\xef\xbb\xbf'): |
|
145 | if line == 1 and l.startswith(b'\xef\xbb\xbf'): | |
146 | # Someone set us up the BOM |
|
146 | # Someone set us up the BOM | |
147 | l = l[3:] |
|
147 | l = l[3:] | |
148 | if cont: |
|
148 | if cont: | |
149 | if commentre.match(l): |
|
149 | if commentre.match(l): | |
150 | continue |
|
150 | continue | |
151 | m = contre.match(l) |
|
151 | m = contre.match(l) | |
152 | if m: |
|
152 | if m: | |
153 | if sections and section not in sections: |
|
153 | if sections and section not in sections: | |
154 | continue |
|
154 | continue | |
155 | v = self.get(section, item) + b"\n" + m.group(1) |
|
155 | v = self.get(section, item) + b"\n" + m.group(1) | |
156 | self.set(section, item, v, b"%s:%d" % (src, line)) |
|
156 | self.set(section, item, v, b"%s:%d" % (src, line)) | |
157 | continue |
|
157 | continue | |
158 | item = None |
|
158 | item = None | |
159 | cont = False |
|
159 | cont = False | |
160 | m = includere.match(l) |
|
160 | m = includere.match(l) | |
161 |
|
161 | |||
162 | if m and include: |
|
162 | if m and include: | |
163 | expanded = util.expandpath(m.group(1)) |
|
163 | expanded = util.expandpath(m.group(1)) | |
164 | inc = os.path.normpath( |
|
|||
165 | os.path.join(os.path.dirname(src), expanded) |
|
|||
166 | ) |
|
|||
167 | try: |
|
164 | try: | |
168 |
include(expanded |
|
165 | include(expanded, remap=remap, sections=sections) | |
169 | except IOError as inst: |
|
166 | except IOError as inst: | |
170 | if inst.errno != errno.ENOENT: |
|
167 | if inst.errno != errno.ENOENT: | |
171 | raise error.ParseError( |
|
168 | raise error.ParseError( | |
172 | _(b"cannot include %s (%s)") |
|
169 | _(b"cannot include %s (%s)") | |
173 |
% ( |
|
170 | % (expanded, encoding.strtolocal(inst.strerror)), | |
174 | b"%s:%d" % (src, line), |
|
171 | b"%s:%d" % (src, line), | |
175 | ) |
|
172 | ) | |
176 | continue |
|
173 | continue | |
177 | if emptyre.match(l): |
|
174 | if emptyre.match(l): | |
178 | continue |
|
175 | continue | |
179 | m = sectionre.match(l) |
|
176 | m = sectionre.match(l) | |
180 | if m: |
|
177 | if m: | |
181 | section = m.group(1) |
|
178 | section = m.group(1) | |
182 | if remap: |
|
179 | if remap: | |
183 | section = remap.get(section, section) |
|
180 | section = remap.get(section, section) | |
184 | if section not in self: |
|
181 | if section not in self: | |
185 | self._data[section] = util.cowsortdict() |
|
182 | self._data[section] = util.cowsortdict() | |
186 | continue |
|
183 | continue | |
187 | m = itemre.match(l) |
|
184 | m = itemre.match(l) | |
188 | if m: |
|
185 | if m: | |
189 | item = m.group(1) |
|
186 | item = m.group(1) | |
190 | cont = True |
|
187 | cont = True | |
191 | if sections and section not in sections: |
|
188 | if sections and section not in sections: | |
192 | continue |
|
189 | continue | |
193 | self.set(section, item, m.group(2), b"%s:%d" % (src, line)) |
|
190 | self.set(section, item, m.group(2), b"%s:%d" % (src, line)) | |
194 | continue |
|
191 | continue | |
195 | m = unsetre.match(l) |
|
192 | m = unsetre.match(l) | |
196 | if m: |
|
193 | if m: | |
197 | name = m.group(1) |
|
194 | name = m.group(1) | |
198 | if sections and section not in sections: |
|
195 | if sections and section not in sections: | |
199 | continue |
|
196 | continue | |
200 | if self.get(section, name) is not None: |
|
197 | if self.get(section, name) is not None: | |
201 | self._data[section] = self._data[section].preparewrite() |
|
198 | self._data[section] = self._data[section].preparewrite() | |
202 | del self._data[section][name] |
|
199 | del self._data[section][name] | |
203 | self._unset.append((section, name)) |
|
200 | self._unset.append((section, name)) | |
204 | continue |
|
201 | continue | |
205 |
|
202 | |||
206 | raise error.ParseError(l.rstrip(), (b"%s:%d" % (src, line))) |
|
203 | raise error.ParseError(l.rstrip(), (b"%s:%d" % (src, line))) | |
207 |
|
204 | |||
208 | def read(self, path, fp=None, sections=None, remap=None): |
|
205 | def read(self, path, fp=None, sections=None, remap=None): | |
209 | if not fp: |
|
206 | if not fp: | |
210 | fp = util.posixfile(path, b'rb') |
|
207 | fp = util.posixfile(path, b'rb') | |
211 | assert getattr(fp, 'mode', 'rb') == 'rb', ( |
|
208 | assert getattr(fp, 'mode', 'rb') == 'rb', ( | |
212 | b'config files must be opened in binary mode, got fp=%r mode=%r' |
|
209 | b'config files must be opened in binary mode, got fp=%r mode=%r' | |
213 | % (fp, fp.mode,) |
|
210 | % (fp, fp.mode,) | |
214 | ) |
|
211 | ) | |
215 |
|
212 | |||
216 | dir = os.path.dirname(path) |
|
213 | dir = os.path.dirname(path) | |
217 |
|
214 | |||
218 |
def include(rel, |
|
215 | def include(rel, remap, sections): | |
219 | abs = os.path.normpath(os.path.join(dir, rel)) |
|
216 | abs = os.path.normpath(os.path.join(dir, rel)) | |
220 | self.read(abs, remap=remap, sections=sections) |
|
217 | self.read(abs, remap=remap, sections=sections) | |
221 |
|
218 | |||
222 | self.parse( |
|
219 | self.parse( | |
223 | path, fp.read(), sections=sections, remap=remap, include=include |
|
220 | path, fp.read(), sections=sections, remap=remap, include=include | |
224 | ) |
|
221 | ) | |
225 |
|
222 | |||
226 |
|
223 | |||
227 | def parselist(value): |
|
224 | def parselist(value): | |
228 | """parse a configuration value as a list of comma/space separated strings |
|
225 | """parse a configuration value as a list of comma/space separated strings | |
229 |
|
226 | |||
230 | >>> parselist(b'this,is "a small" ,test') |
|
227 | >>> parselist(b'this,is "a small" ,test') | |
231 | ['this', 'is', 'a small', 'test'] |
|
228 | ['this', 'is', 'a small', 'test'] | |
232 | """ |
|
229 | """ | |
233 |
|
230 | |||
234 | def _parse_plain(parts, s, offset): |
|
231 | def _parse_plain(parts, s, offset): | |
235 | whitespace = False |
|
232 | whitespace = False | |
236 | while offset < len(s) and ( |
|
233 | while offset < len(s) and ( | |
237 | s[offset : offset + 1].isspace() or s[offset : offset + 1] == b',' |
|
234 | s[offset : offset + 1].isspace() or s[offset : offset + 1] == b',' | |
238 | ): |
|
235 | ): | |
239 | whitespace = True |
|
236 | whitespace = True | |
240 | offset += 1 |
|
237 | offset += 1 | |
241 | if offset >= len(s): |
|
238 | if offset >= len(s): | |
242 | return None, parts, offset |
|
239 | return None, parts, offset | |
243 | if whitespace: |
|
240 | if whitespace: | |
244 | parts.append(b'') |
|
241 | parts.append(b'') | |
245 | if s[offset : offset + 1] == b'"' and not parts[-1]: |
|
242 | if s[offset : offset + 1] == b'"' and not parts[-1]: | |
246 | return _parse_quote, parts, offset + 1 |
|
243 | return _parse_quote, parts, offset + 1 | |
247 | elif s[offset : offset + 1] == b'"' and parts[-1][-1:] == b'\\': |
|
244 | elif s[offset : offset + 1] == b'"' and parts[-1][-1:] == b'\\': | |
248 | parts[-1] = parts[-1][:-1] + s[offset : offset + 1] |
|
245 | parts[-1] = parts[-1][:-1] + s[offset : offset + 1] | |
249 | return _parse_plain, parts, offset + 1 |
|
246 | return _parse_plain, parts, offset + 1 | |
250 | parts[-1] += s[offset : offset + 1] |
|
247 | parts[-1] += s[offset : offset + 1] | |
251 | return _parse_plain, parts, offset + 1 |
|
248 | return _parse_plain, parts, offset + 1 | |
252 |
|
249 | |||
253 | def _parse_quote(parts, s, offset): |
|
250 | def _parse_quote(parts, s, offset): | |
254 | if offset < len(s) and s[offset : offset + 1] == b'"': # "" |
|
251 | if offset < len(s) and s[offset : offset + 1] == b'"': # "" | |
255 | parts.append(b'') |
|
252 | parts.append(b'') | |
256 | offset += 1 |
|
253 | offset += 1 | |
257 | while offset < len(s) and ( |
|
254 | while offset < len(s) and ( | |
258 | s[offset : offset + 1].isspace() |
|
255 | s[offset : offset + 1].isspace() | |
259 | or s[offset : offset + 1] == b',' |
|
256 | or s[offset : offset + 1] == b',' | |
260 | ): |
|
257 | ): | |
261 | offset += 1 |
|
258 | offset += 1 | |
262 | return _parse_plain, parts, offset |
|
259 | return _parse_plain, parts, offset | |
263 |
|
260 | |||
264 | while offset < len(s) and s[offset : offset + 1] != b'"': |
|
261 | while offset < len(s) and s[offset : offset + 1] != b'"': | |
265 | if ( |
|
262 | if ( | |
266 | s[offset : offset + 1] == b'\\' |
|
263 | s[offset : offset + 1] == b'\\' | |
267 | and offset + 1 < len(s) |
|
264 | and offset + 1 < len(s) | |
268 | and s[offset + 1 : offset + 2] == b'"' |
|
265 | and s[offset + 1 : offset + 2] == b'"' | |
269 | ): |
|
266 | ): | |
270 | offset += 1 |
|
267 | offset += 1 | |
271 | parts[-1] += b'"' |
|
268 | parts[-1] += b'"' | |
272 | else: |
|
269 | else: | |
273 | parts[-1] += s[offset : offset + 1] |
|
270 | parts[-1] += s[offset : offset + 1] | |
274 | offset += 1 |
|
271 | offset += 1 | |
275 |
|
272 | |||
276 | if offset >= len(s): |
|
273 | if offset >= len(s): | |
277 | real_parts = _configlist(parts[-1]) |
|
274 | real_parts = _configlist(parts[-1]) | |
278 | if not real_parts: |
|
275 | if not real_parts: | |
279 | parts[-1] = b'"' |
|
276 | parts[-1] = b'"' | |
280 | else: |
|
277 | else: | |
281 | real_parts[0] = b'"' + real_parts[0] |
|
278 | real_parts[0] = b'"' + real_parts[0] | |
282 | parts = parts[:-1] |
|
279 | parts = parts[:-1] | |
283 | parts.extend(real_parts) |
|
280 | parts.extend(real_parts) | |
284 | return None, parts, offset |
|
281 | return None, parts, offset | |
285 |
|
282 | |||
286 | offset += 1 |
|
283 | offset += 1 | |
287 | while offset < len(s) and s[offset : offset + 1] in [b' ', b',']: |
|
284 | while offset < len(s) and s[offset : offset + 1] in [b' ', b',']: | |
288 | offset += 1 |
|
285 | offset += 1 | |
289 |
|
286 | |||
290 | if offset < len(s): |
|
287 | if offset < len(s): | |
291 | if offset + 1 == len(s) and s[offset : offset + 1] == b'"': |
|
288 | if offset + 1 == len(s) and s[offset : offset + 1] == b'"': | |
292 | parts[-1] += b'"' |
|
289 | parts[-1] += b'"' | |
293 | offset += 1 |
|
290 | offset += 1 | |
294 | else: |
|
291 | else: | |
295 | parts.append(b'') |
|
292 | parts.append(b'') | |
296 | else: |
|
293 | else: | |
297 | return None, parts, offset |
|
294 | return None, parts, offset | |
298 |
|
295 | |||
299 | return _parse_plain, parts, offset |
|
296 | return _parse_plain, parts, offset | |
300 |
|
297 | |||
301 | def _configlist(s): |
|
298 | def _configlist(s): | |
302 | s = s.rstrip(b' ,') |
|
299 | s = s.rstrip(b' ,') | |
303 | if not s: |
|
300 | if not s: | |
304 | return [] |
|
301 | return [] | |
305 | parser, parts, offset = _parse_plain, [b''], 0 |
|
302 | parser, parts, offset = _parse_plain, [b''], 0 | |
306 | while parser: |
|
303 | while parser: | |
307 | parser, parts, offset = parser(parts, s, offset) |
|
304 | parser, parts, offset = parser(parts, s, offset) | |
308 | return parts |
|
305 | return parts | |
309 |
|
306 | |||
310 | if value is not None and isinstance(value, bytes): |
|
307 | if value is not None and isinstance(value, bytes): | |
311 | result = _configlist(value.lstrip(b' ,\n')) |
|
308 | result = _configlist(value.lstrip(b' ,\n')) | |
312 | else: |
|
309 | else: | |
313 | result = value |
|
310 | result = value | |
314 | return result or [] |
|
311 | return result or [] |
@@ -1,459 +1,459 b'' | |||||
1 | # subrepoutil.py - sub-repository operations and substate handling |
|
1 | # subrepoutil.py - sub-repository operations and substate handling | |
2 | # |
|
2 | # | |
3 | # Copyright 2009-2010 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2009-2010 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import errno |
|
10 | import errno | |
11 | import os |
|
11 | import os | |
12 | import posixpath |
|
12 | import posixpath | |
13 | import re |
|
13 | import re | |
14 |
|
14 | |||
15 | from .i18n import _ |
|
15 | from .i18n import _ | |
16 | from .pycompat import getattr |
|
16 | from .pycompat import getattr | |
17 | from . import ( |
|
17 | from . import ( | |
18 | config, |
|
18 | config, | |
19 | error, |
|
19 | error, | |
20 | filemerge, |
|
20 | filemerge, | |
21 | pathutil, |
|
21 | pathutil, | |
22 | phases, |
|
22 | phases, | |
23 | pycompat, |
|
23 | pycompat, | |
24 | util, |
|
24 | util, | |
25 | ) |
|
25 | ) | |
26 | from .utils import stringutil |
|
26 | from .utils import stringutil | |
27 |
|
27 | |||
28 | nullstate = (b'', b'', b'empty') |
|
28 | nullstate = (b'', b'', b'empty') | |
29 |
|
29 | |||
30 |
|
30 | |||
31 | def state(ctx, ui): |
|
31 | def state(ctx, ui): | |
32 | """return a state dict, mapping subrepo paths configured in .hgsub |
|
32 | """return a state dict, mapping subrepo paths configured in .hgsub | |
33 | to tuple: (source from .hgsub, revision from .hgsubstate, kind |
|
33 | to tuple: (source from .hgsub, revision from .hgsubstate, kind | |
34 | (key in types dict)) |
|
34 | (key in types dict)) | |
35 | """ |
|
35 | """ | |
36 | p = config.config() |
|
36 | p = config.config() | |
37 | repo = ctx.repo() |
|
37 | repo = ctx.repo() | |
38 |
|
38 | |||
39 |
def read(f |
|
39 | def read(f, sections=None, remap=None): | |
40 | if f in ctx: |
|
40 | if f in ctx: | |
41 | try: |
|
41 | try: | |
42 | data = ctx[f].data() |
|
42 | data = ctx[f].data() | |
43 | except IOError as err: |
|
43 | except IOError as err: | |
44 | if err.errno != errno.ENOENT: |
|
44 | if err.errno != errno.ENOENT: | |
45 | raise |
|
45 | raise | |
46 | # handle missing subrepo spec files as removed |
|
46 | # handle missing subrepo spec files as removed | |
47 | ui.warn( |
|
47 | ui.warn( | |
48 | _(b"warning: subrepo spec file \'%s\' not found\n") |
|
48 | _(b"warning: subrepo spec file \'%s\' not found\n") | |
49 | % repo.pathto(f) |
|
49 | % repo.pathto(f) | |
50 | ) |
|
50 | ) | |
51 | return |
|
51 | return | |
52 | p.parse(f, data, sections, remap, read) |
|
52 | p.parse(f, data, sections, remap, read) | |
53 | else: |
|
53 | else: | |
54 | raise error.Abort( |
|
54 | raise error.Abort( | |
55 | _(b"subrepo spec file \'%s\' not found") % repo.pathto(f) |
|
55 | _(b"subrepo spec file \'%s\' not found") % repo.pathto(f) | |
56 | ) |
|
56 | ) | |
57 |
|
57 | |||
58 | if b'.hgsub' in ctx: |
|
58 | if b'.hgsub' in ctx: | |
59 |
read(b'.hgsub' |
|
59 | read(b'.hgsub') | |
60 |
|
60 | |||
61 | for path, src in ui.configitems(b'subpaths'): |
|
61 | for path, src in ui.configitems(b'subpaths'): | |
62 | p.set(b'subpaths', path, src, ui.configsource(b'subpaths', path)) |
|
62 | p.set(b'subpaths', path, src, ui.configsource(b'subpaths', path)) | |
63 |
|
63 | |||
64 | rev = {} |
|
64 | rev = {} | |
65 | if b'.hgsubstate' in ctx: |
|
65 | if b'.hgsubstate' in ctx: | |
66 | try: |
|
66 | try: | |
67 | for i, l in enumerate(ctx[b'.hgsubstate'].data().splitlines()): |
|
67 | for i, l in enumerate(ctx[b'.hgsubstate'].data().splitlines()): | |
68 | l = l.lstrip() |
|
68 | l = l.lstrip() | |
69 | if not l: |
|
69 | if not l: | |
70 | continue |
|
70 | continue | |
71 | try: |
|
71 | try: | |
72 | revision, path = l.split(b" ", 1) |
|
72 | revision, path = l.split(b" ", 1) | |
73 | except ValueError: |
|
73 | except ValueError: | |
74 | raise error.Abort( |
|
74 | raise error.Abort( | |
75 | _( |
|
75 | _( | |
76 | b"invalid subrepository revision " |
|
76 | b"invalid subrepository revision " | |
77 | b"specifier in \'%s\' line %d" |
|
77 | b"specifier in \'%s\' line %d" | |
78 | ) |
|
78 | ) | |
79 | % (repo.pathto(b'.hgsubstate'), (i + 1)) |
|
79 | % (repo.pathto(b'.hgsubstate'), (i + 1)) | |
80 | ) |
|
80 | ) | |
81 | rev[path] = revision |
|
81 | rev[path] = revision | |
82 | except IOError as err: |
|
82 | except IOError as err: | |
83 | if err.errno != errno.ENOENT: |
|
83 | if err.errno != errno.ENOENT: | |
84 | raise |
|
84 | raise | |
85 |
|
85 | |||
86 | def remap(src): |
|
86 | def remap(src): | |
87 | for pattern, repl in p.items(b'subpaths'): |
|
87 | for pattern, repl in p.items(b'subpaths'): | |
88 | # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub |
|
88 | # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub | |
89 | # does a string decode. |
|
89 | # does a string decode. | |
90 | repl = stringutil.escapestr(repl) |
|
90 | repl = stringutil.escapestr(repl) | |
91 | # However, we still want to allow back references to go |
|
91 | # However, we still want to allow back references to go | |
92 | # through unharmed, so we turn r'\\1' into r'\1'. Again, |
|
92 | # through unharmed, so we turn r'\\1' into r'\1'. Again, | |
93 | # extra escapes are needed because re.sub string decodes. |
|
93 | # extra escapes are needed because re.sub string decodes. | |
94 | repl = re.sub(br'\\\\([0-9]+)', br'\\\1', repl) |
|
94 | repl = re.sub(br'\\\\([0-9]+)', br'\\\1', repl) | |
95 | try: |
|
95 | try: | |
96 | src = re.sub(pattern, repl, src, 1) |
|
96 | src = re.sub(pattern, repl, src, 1) | |
97 | except re.error as e: |
|
97 | except re.error as e: | |
98 | raise error.Abort( |
|
98 | raise error.Abort( | |
99 | _(b"bad subrepository pattern in %s: %s") |
|
99 | _(b"bad subrepository pattern in %s: %s") | |
100 | % ( |
|
100 | % ( | |
101 | p.source(b'subpaths', pattern), |
|
101 | p.source(b'subpaths', pattern), | |
102 | stringutil.forcebytestr(e), |
|
102 | stringutil.forcebytestr(e), | |
103 | ) |
|
103 | ) | |
104 | ) |
|
104 | ) | |
105 | return src |
|
105 | return src | |
106 |
|
106 | |||
107 | state = {} |
|
107 | state = {} | |
108 | for path, src in p[b''].items(): |
|
108 | for path, src in p[b''].items(): | |
109 | kind = b'hg' |
|
109 | kind = b'hg' | |
110 | if src.startswith(b'['): |
|
110 | if src.startswith(b'['): | |
111 | if b']' not in src: |
|
111 | if b']' not in src: | |
112 | raise error.Abort(_(b'missing ] in subrepository source')) |
|
112 | raise error.Abort(_(b'missing ] in subrepository source')) | |
113 | kind, src = src.split(b']', 1) |
|
113 | kind, src = src.split(b']', 1) | |
114 | kind = kind[1:] |
|
114 | kind = kind[1:] | |
115 | src = src.lstrip() # strip any extra whitespace after ']' |
|
115 | src = src.lstrip() # strip any extra whitespace after ']' | |
116 |
|
116 | |||
117 | if not util.url(src).isabs(): |
|
117 | if not util.url(src).isabs(): | |
118 | parent = _abssource(repo, abort=False) |
|
118 | parent = _abssource(repo, abort=False) | |
119 | if parent: |
|
119 | if parent: | |
120 | parent = util.url(parent) |
|
120 | parent = util.url(parent) | |
121 | parent.path = posixpath.join(parent.path or b'', src) |
|
121 | parent.path = posixpath.join(parent.path or b'', src) | |
122 | parent.path = posixpath.normpath(parent.path) |
|
122 | parent.path = posixpath.normpath(parent.path) | |
123 | joined = bytes(parent) |
|
123 | joined = bytes(parent) | |
124 | # Remap the full joined path and use it if it changes, |
|
124 | # Remap the full joined path and use it if it changes, | |
125 | # else remap the original source. |
|
125 | # else remap the original source. | |
126 | remapped = remap(joined) |
|
126 | remapped = remap(joined) | |
127 | if remapped == joined: |
|
127 | if remapped == joined: | |
128 | src = remap(src) |
|
128 | src = remap(src) | |
129 | else: |
|
129 | else: | |
130 | src = remapped |
|
130 | src = remapped | |
131 |
|
131 | |||
132 | src = remap(src) |
|
132 | src = remap(src) | |
133 | state[util.pconvert(path)] = (src.strip(), rev.get(path, b''), kind) |
|
133 | state[util.pconvert(path)] = (src.strip(), rev.get(path, b''), kind) | |
134 |
|
134 | |||
135 | return state |
|
135 | return state | |
136 |
|
136 | |||
137 |
|
137 | |||
138 | def writestate(repo, state): |
|
138 | def writestate(repo, state): | |
139 | """rewrite .hgsubstate in (outer) repo with these subrepo states""" |
|
139 | """rewrite .hgsubstate in (outer) repo with these subrepo states""" | |
140 | lines = [ |
|
140 | lines = [ | |
141 | b'%s %s\n' % (state[s][1], s) |
|
141 | b'%s %s\n' % (state[s][1], s) | |
142 | for s in sorted(state) |
|
142 | for s in sorted(state) | |
143 | if state[s][1] != nullstate[1] |
|
143 | if state[s][1] != nullstate[1] | |
144 | ] |
|
144 | ] | |
145 | repo.wwrite(b'.hgsubstate', b''.join(lines), b'') |
|
145 | repo.wwrite(b'.hgsubstate', b''.join(lines), b'') | |
146 |
|
146 | |||
147 |
|
147 | |||
148 | def submerge(repo, wctx, mctx, actx, overwrite, labels=None): |
|
148 | def submerge(repo, wctx, mctx, actx, overwrite, labels=None): | |
149 | """delegated from merge.applyupdates: merging of .hgsubstate file |
|
149 | """delegated from merge.applyupdates: merging of .hgsubstate file | |
150 | in working context, merging context and ancestor context""" |
|
150 | in working context, merging context and ancestor context""" | |
151 | if mctx == actx: # backwards? |
|
151 | if mctx == actx: # backwards? | |
152 | actx = wctx.p1() |
|
152 | actx = wctx.p1() | |
153 | s1 = wctx.substate |
|
153 | s1 = wctx.substate | |
154 | s2 = mctx.substate |
|
154 | s2 = mctx.substate | |
155 | sa = actx.substate |
|
155 | sa = actx.substate | |
156 | sm = {} |
|
156 | sm = {} | |
157 |
|
157 | |||
158 | repo.ui.debug(b"subrepo merge %s %s %s\n" % (wctx, mctx, actx)) |
|
158 | repo.ui.debug(b"subrepo merge %s %s %s\n" % (wctx, mctx, actx)) | |
159 |
|
159 | |||
160 | def debug(s, msg, r=b""): |
|
160 | def debug(s, msg, r=b""): | |
161 | if r: |
|
161 | if r: | |
162 | r = b"%s:%s:%s" % r |
|
162 | r = b"%s:%s:%s" % r | |
163 | repo.ui.debug(b" subrepo %s: %s %s\n" % (s, msg, r)) |
|
163 | repo.ui.debug(b" subrepo %s: %s %s\n" % (s, msg, r)) | |
164 |
|
164 | |||
165 | promptssrc = filemerge.partextras(labels) |
|
165 | promptssrc = filemerge.partextras(labels) | |
166 | for s, l in sorted(pycompat.iteritems(s1)): |
|
166 | for s, l in sorted(pycompat.iteritems(s1)): | |
167 | a = sa.get(s, nullstate) |
|
167 | a = sa.get(s, nullstate) | |
168 | ld = l # local state with possible dirty flag for compares |
|
168 | ld = l # local state with possible dirty flag for compares | |
169 | if wctx.sub(s).dirty(): |
|
169 | if wctx.sub(s).dirty(): | |
170 | ld = (l[0], l[1] + b"+") |
|
170 | ld = (l[0], l[1] + b"+") | |
171 | if wctx == actx: # overwrite |
|
171 | if wctx == actx: # overwrite | |
172 | a = ld |
|
172 | a = ld | |
173 |
|
173 | |||
174 | prompts = promptssrc.copy() |
|
174 | prompts = promptssrc.copy() | |
175 | prompts[b's'] = s |
|
175 | prompts[b's'] = s | |
176 | if s in s2: |
|
176 | if s in s2: | |
177 | r = s2[s] |
|
177 | r = s2[s] | |
178 | if ld == r or r == a: # no change or local is newer |
|
178 | if ld == r or r == a: # no change or local is newer | |
179 | sm[s] = l |
|
179 | sm[s] = l | |
180 | continue |
|
180 | continue | |
181 | elif ld == a: # other side changed |
|
181 | elif ld == a: # other side changed | |
182 | debug(s, b"other changed, get", r) |
|
182 | debug(s, b"other changed, get", r) | |
183 | wctx.sub(s).get(r, overwrite) |
|
183 | wctx.sub(s).get(r, overwrite) | |
184 | sm[s] = r |
|
184 | sm[s] = r | |
185 | elif ld[0] != r[0]: # sources differ |
|
185 | elif ld[0] != r[0]: # sources differ | |
186 | prompts[b'lo'] = l[0] |
|
186 | prompts[b'lo'] = l[0] | |
187 | prompts[b'ro'] = r[0] |
|
187 | prompts[b'ro'] = r[0] | |
188 | if repo.ui.promptchoice( |
|
188 | if repo.ui.promptchoice( | |
189 | _( |
|
189 | _( | |
190 | b' subrepository sources for %(s)s differ\n' |
|
190 | b' subrepository sources for %(s)s differ\n' | |
191 | b'you can use (l)ocal%(l)s source (%(lo)s)' |
|
191 | b'you can use (l)ocal%(l)s source (%(lo)s)' | |
192 | b' or (r)emote%(o)s source (%(ro)s).\n' |
|
192 | b' or (r)emote%(o)s source (%(ro)s).\n' | |
193 | b'what do you want to do?' |
|
193 | b'what do you want to do?' | |
194 | b'$$ &Local $$ &Remote' |
|
194 | b'$$ &Local $$ &Remote' | |
195 | ) |
|
195 | ) | |
196 | % prompts, |
|
196 | % prompts, | |
197 | 0, |
|
197 | 0, | |
198 | ): |
|
198 | ): | |
199 | debug(s, b"prompt changed, get", r) |
|
199 | debug(s, b"prompt changed, get", r) | |
200 | wctx.sub(s).get(r, overwrite) |
|
200 | wctx.sub(s).get(r, overwrite) | |
201 | sm[s] = r |
|
201 | sm[s] = r | |
202 | elif ld[1] == a[1]: # local side is unchanged |
|
202 | elif ld[1] == a[1]: # local side is unchanged | |
203 | debug(s, b"other side changed, get", r) |
|
203 | debug(s, b"other side changed, get", r) | |
204 | wctx.sub(s).get(r, overwrite) |
|
204 | wctx.sub(s).get(r, overwrite) | |
205 | sm[s] = r |
|
205 | sm[s] = r | |
206 | else: |
|
206 | else: | |
207 | debug(s, b"both sides changed") |
|
207 | debug(s, b"both sides changed") | |
208 | srepo = wctx.sub(s) |
|
208 | srepo = wctx.sub(s) | |
209 | prompts[b'sl'] = srepo.shortid(l[1]) |
|
209 | prompts[b'sl'] = srepo.shortid(l[1]) | |
210 | prompts[b'sr'] = srepo.shortid(r[1]) |
|
210 | prompts[b'sr'] = srepo.shortid(r[1]) | |
211 | option = repo.ui.promptchoice( |
|
211 | option = repo.ui.promptchoice( | |
212 | _( |
|
212 | _( | |
213 | b' subrepository %(s)s diverged (local revision: %(sl)s, ' |
|
213 | b' subrepository %(s)s diverged (local revision: %(sl)s, ' | |
214 | b'remote revision: %(sr)s)\n' |
|
214 | b'remote revision: %(sr)s)\n' | |
215 | b'you can (m)erge, keep (l)ocal%(l)s or keep ' |
|
215 | b'you can (m)erge, keep (l)ocal%(l)s or keep ' | |
216 | b'(r)emote%(o)s.\n' |
|
216 | b'(r)emote%(o)s.\n' | |
217 | b'what do you want to do?' |
|
217 | b'what do you want to do?' | |
218 | b'$$ &Merge $$ &Local $$ &Remote' |
|
218 | b'$$ &Merge $$ &Local $$ &Remote' | |
219 | ) |
|
219 | ) | |
220 | % prompts, |
|
220 | % prompts, | |
221 | 0, |
|
221 | 0, | |
222 | ) |
|
222 | ) | |
223 | if option == 0: |
|
223 | if option == 0: | |
224 | wctx.sub(s).merge(r) |
|
224 | wctx.sub(s).merge(r) | |
225 | sm[s] = l |
|
225 | sm[s] = l | |
226 | debug(s, b"merge with", r) |
|
226 | debug(s, b"merge with", r) | |
227 | elif option == 1: |
|
227 | elif option == 1: | |
228 | sm[s] = l |
|
228 | sm[s] = l | |
229 | debug(s, b"keep local subrepo revision", l) |
|
229 | debug(s, b"keep local subrepo revision", l) | |
230 | else: |
|
230 | else: | |
231 | wctx.sub(s).get(r, overwrite) |
|
231 | wctx.sub(s).get(r, overwrite) | |
232 | sm[s] = r |
|
232 | sm[s] = r | |
233 | debug(s, b"get remote subrepo revision", r) |
|
233 | debug(s, b"get remote subrepo revision", r) | |
234 | elif ld == a: # remote removed, local unchanged |
|
234 | elif ld == a: # remote removed, local unchanged | |
235 | debug(s, b"remote removed, remove") |
|
235 | debug(s, b"remote removed, remove") | |
236 | wctx.sub(s).remove() |
|
236 | wctx.sub(s).remove() | |
237 | elif a == nullstate: # not present in remote or ancestor |
|
237 | elif a == nullstate: # not present in remote or ancestor | |
238 | debug(s, b"local added, keep") |
|
238 | debug(s, b"local added, keep") | |
239 | sm[s] = l |
|
239 | sm[s] = l | |
240 | continue |
|
240 | continue | |
241 | else: |
|
241 | else: | |
242 | if repo.ui.promptchoice( |
|
242 | if repo.ui.promptchoice( | |
243 | _( |
|
243 | _( | |
244 | b' local%(l)s changed subrepository %(s)s' |
|
244 | b' local%(l)s changed subrepository %(s)s' | |
245 | b' which remote%(o)s removed\n' |
|
245 | b' which remote%(o)s removed\n' | |
246 | b'use (c)hanged version or (d)elete?' |
|
246 | b'use (c)hanged version or (d)elete?' | |
247 | b'$$ &Changed $$ &Delete' |
|
247 | b'$$ &Changed $$ &Delete' | |
248 | ) |
|
248 | ) | |
249 | % prompts, |
|
249 | % prompts, | |
250 | 0, |
|
250 | 0, | |
251 | ): |
|
251 | ): | |
252 | debug(s, b"prompt remove") |
|
252 | debug(s, b"prompt remove") | |
253 | wctx.sub(s).remove() |
|
253 | wctx.sub(s).remove() | |
254 |
|
254 | |||
255 | for s, r in sorted(s2.items()): |
|
255 | for s, r in sorted(s2.items()): | |
256 | if s in s1: |
|
256 | if s in s1: | |
257 | continue |
|
257 | continue | |
258 | elif s not in sa: |
|
258 | elif s not in sa: | |
259 | debug(s, b"remote added, get", r) |
|
259 | debug(s, b"remote added, get", r) | |
260 | mctx.sub(s).get(r) |
|
260 | mctx.sub(s).get(r) | |
261 | sm[s] = r |
|
261 | sm[s] = r | |
262 | elif r != sa[s]: |
|
262 | elif r != sa[s]: | |
263 | prompts = promptssrc.copy() |
|
263 | prompts = promptssrc.copy() | |
264 | prompts[b's'] = s |
|
264 | prompts[b's'] = s | |
265 | if ( |
|
265 | if ( | |
266 | repo.ui.promptchoice( |
|
266 | repo.ui.promptchoice( | |
267 | _( |
|
267 | _( | |
268 | b' remote%(o)s changed subrepository %(s)s' |
|
268 | b' remote%(o)s changed subrepository %(s)s' | |
269 | b' which local%(l)s removed\n' |
|
269 | b' which local%(l)s removed\n' | |
270 | b'use (c)hanged version or (d)elete?' |
|
270 | b'use (c)hanged version or (d)elete?' | |
271 | b'$$ &Changed $$ &Delete' |
|
271 | b'$$ &Changed $$ &Delete' | |
272 | ) |
|
272 | ) | |
273 | % prompts, |
|
273 | % prompts, | |
274 | 0, |
|
274 | 0, | |
275 | ) |
|
275 | ) | |
276 | == 0 |
|
276 | == 0 | |
277 | ): |
|
277 | ): | |
278 | debug(s, b"prompt recreate", r) |
|
278 | debug(s, b"prompt recreate", r) | |
279 | mctx.sub(s).get(r) |
|
279 | mctx.sub(s).get(r) | |
280 | sm[s] = r |
|
280 | sm[s] = r | |
281 |
|
281 | |||
282 | # record merged .hgsubstate |
|
282 | # record merged .hgsubstate | |
283 | writestate(repo, sm) |
|
283 | writestate(repo, sm) | |
284 | return sm |
|
284 | return sm | |
285 |
|
285 | |||
286 |
|
286 | |||
287 | def precommit(ui, wctx, status, match, force=False): |
|
287 | def precommit(ui, wctx, status, match, force=False): | |
288 | """Calculate .hgsubstate changes that should be applied before committing |
|
288 | """Calculate .hgsubstate changes that should be applied before committing | |
289 |
|
289 | |||
290 | Returns (subs, commitsubs, newstate) where |
|
290 | Returns (subs, commitsubs, newstate) where | |
291 | - subs: changed subrepos (including dirty ones) |
|
291 | - subs: changed subrepos (including dirty ones) | |
292 | - commitsubs: dirty subrepos which the caller needs to commit recursively |
|
292 | - commitsubs: dirty subrepos which the caller needs to commit recursively | |
293 | - newstate: new state dict which the caller must write to .hgsubstate |
|
293 | - newstate: new state dict which the caller must write to .hgsubstate | |
294 |
|
294 | |||
295 | This also updates the given status argument. |
|
295 | This also updates the given status argument. | |
296 | """ |
|
296 | """ | |
297 | subs = [] |
|
297 | subs = [] | |
298 | commitsubs = set() |
|
298 | commitsubs = set() | |
299 | newstate = wctx.substate.copy() |
|
299 | newstate = wctx.substate.copy() | |
300 |
|
300 | |||
301 | # only manage subrepos and .hgsubstate if .hgsub is present |
|
301 | # only manage subrepos and .hgsubstate if .hgsub is present | |
302 | if b'.hgsub' in wctx: |
|
302 | if b'.hgsub' in wctx: | |
303 | # we'll decide whether to track this ourselves, thanks |
|
303 | # we'll decide whether to track this ourselves, thanks | |
304 | for c in status.modified, status.added, status.removed: |
|
304 | for c in status.modified, status.added, status.removed: | |
305 | if b'.hgsubstate' in c: |
|
305 | if b'.hgsubstate' in c: | |
306 | c.remove(b'.hgsubstate') |
|
306 | c.remove(b'.hgsubstate') | |
307 |
|
307 | |||
308 | # compare current state to last committed state |
|
308 | # compare current state to last committed state | |
309 | # build new substate based on last committed state |
|
309 | # build new substate based on last committed state | |
310 | oldstate = wctx.p1().substate |
|
310 | oldstate = wctx.p1().substate | |
311 | for s in sorted(newstate.keys()): |
|
311 | for s in sorted(newstate.keys()): | |
312 | if not match(s): |
|
312 | if not match(s): | |
313 | # ignore working copy, use old state if present |
|
313 | # ignore working copy, use old state if present | |
314 | if s in oldstate: |
|
314 | if s in oldstate: | |
315 | newstate[s] = oldstate[s] |
|
315 | newstate[s] = oldstate[s] | |
316 | continue |
|
316 | continue | |
317 | if not force: |
|
317 | if not force: | |
318 | raise error.Abort( |
|
318 | raise error.Abort( | |
319 | _(b"commit with new subrepo %s excluded") % s |
|
319 | _(b"commit with new subrepo %s excluded") % s | |
320 | ) |
|
320 | ) | |
321 | dirtyreason = wctx.sub(s).dirtyreason(True) |
|
321 | dirtyreason = wctx.sub(s).dirtyreason(True) | |
322 | if dirtyreason: |
|
322 | if dirtyreason: | |
323 | if not ui.configbool(b'ui', b'commitsubrepos'): |
|
323 | if not ui.configbool(b'ui', b'commitsubrepos'): | |
324 | raise error.Abort( |
|
324 | raise error.Abort( | |
325 | dirtyreason, |
|
325 | dirtyreason, | |
326 | hint=_(b"use --subrepos for recursive commit"), |
|
326 | hint=_(b"use --subrepos for recursive commit"), | |
327 | ) |
|
327 | ) | |
328 | subs.append(s) |
|
328 | subs.append(s) | |
329 | commitsubs.add(s) |
|
329 | commitsubs.add(s) | |
330 | else: |
|
330 | else: | |
331 | bs = wctx.sub(s).basestate() |
|
331 | bs = wctx.sub(s).basestate() | |
332 | newstate[s] = (newstate[s][0], bs, newstate[s][2]) |
|
332 | newstate[s] = (newstate[s][0], bs, newstate[s][2]) | |
333 | if oldstate.get(s, (None, None, None))[1] != bs: |
|
333 | if oldstate.get(s, (None, None, None))[1] != bs: | |
334 | subs.append(s) |
|
334 | subs.append(s) | |
335 |
|
335 | |||
336 | # check for removed subrepos |
|
336 | # check for removed subrepos | |
337 | for p in wctx.parents(): |
|
337 | for p in wctx.parents(): | |
338 | r = [s for s in p.substate if s not in newstate] |
|
338 | r = [s for s in p.substate if s not in newstate] | |
339 | subs += [s for s in r if match(s)] |
|
339 | subs += [s for s in r if match(s)] | |
340 | if subs: |
|
340 | if subs: | |
341 | if not match(b'.hgsub') and b'.hgsub' in ( |
|
341 | if not match(b'.hgsub') and b'.hgsub' in ( | |
342 | wctx.modified() + wctx.added() |
|
342 | wctx.modified() + wctx.added() | |
343 | ): |
|
343 | ): | |
344 | raise error.Abort(_(b"can't commit subrepos without .hgsub")) |
|
344 | raise error.Abort(_(b"can't commit subrepos without .hgsub")) | |
345 | status.modified.insert(0, b'.hgsubstate') |
|
345 | status.modified.insert(0, b'.hgsubstate') | |
346 |
|
346 | |||
347 | elif b'.hgsub' in status.removed: |
|
347 | elif b'.hgsub' in status.removed: | |
348 | # clean up .hgsubstate when .hgsub is removed |
|
348 | # clean up .hgsubstate when .hgsub is removed | |
349 | if b'.hgsubstate' in wctx and b'.hgsubstate' not in ( |
|
349 | if b'.hgsubstate' in wctx and b'.hgsubstate' not in ( | |
350 | status.modified + status.added + status.removed |
|
350 | status.modified + status.added + status.removed | |
351 | ): |
|
351 | ): | |
352 | status.removed.insert(0, b'.hgsubstate') |
|
352 | status.removed.insert(0, b'.hgsubstate') | |
353 |
|
353 | |||
354 | return subs, commitsubs, newstate |
|
354 | return subs, commitsubs, newstate | |
355 |
|
355 | |||
356 |
|
356 | |||
357 | def reporelpath(repo): |
|
357 | def reporelpath(repo): | |
358 | """return path to this (sub)repo as seen from outermost repo""" |
|
358 | """return path to this (sub)repo as seen from outermost repo""" | |
359 | parent = repo |
|
359 | parent = repo | |
360 | while util.safehasattr(parent, b'_subparent'): |
|
360 | while util.safehasattr(parent, b'_subparent'): | |
361 | parent = parent._subparent |
|
361 | parent = parent._subparent | |
362 | return repo.root[len(pathutil.normasprefix(parent.root)) :] |
|
362 | return repo.root[len(pathutil.normasprefix(parent.root)) :] | |
363 |
|
363 | |||
364 |
|
364 | |||
365 | def subrelpath(sub): |
|
365 | def subrelpath(sub): | |
366 | """return path to this subrepo as seen from outermost repo""" |
|
366 | """return path to this subrepo as seen from outermost repo""" | |
367 | return sub._relpath |
|
367 | return sub._relpath | |
368 |
|
368 | |||
369 |
|
369 | |||
370 | def _abssource(repo, push=False, abort=True): |
|
370 | def _abssource(repo, push=False, abort=True): | |
371 | """return pull/push path of repo - either based on parent repo .hgsub info |
|
371 | """return pull/push path of repo - either based on parent repo .hgsub info | |
372 | or on the top repo config. Abort or return None if no source found.""" |
|
372 | or on the top repo config. Abort or return None if no source found.""" | |
373 | if util.safehasattr(repo, b'_subparent'): |
|
373 | if util.safehasattr(repo, b'_subparent'): | |
374 | source = util.url(repo._subsource) |
|
374 | source = util.url(repo._subsource) | |
375 | if source.isabs(): |
|
375 | if source.isabs(): | |
376 | return bytes(source) |
|
376 | return bytes(source) | |
377 | source.path = posixpath.normpath(source.path) |
|
377 | source.path = posixpath.normpath(source.path) | |
378 | parent = _abssource(repo._subparent, push, abort=False) |
|
378 | parent = _abssource(repo._subparent, push, abort=False) | |
379 | if parent: |
|
379 | if parent: | |
380 | parent = util.url(util.pconvert(parent)) |
|
380 | parent = util.url(util.pconvert(parent)) | |
381 | parent.path = posixpath.join(parent.path or b'', source.path) |
|
381 | parent.path = posixpath.join(parent.path or b'', source.path) | |
382 | parent.path = posixpath.normpath(parent.path) |
|
382 | parent.path = posixpath.normpath(parent.path) | |
383 | return bytes(parent) |
|
383 | return bytes(parent) | |
384 | else: # recursion reached top repo |
|
384 | else: # recursion reached top repo | |
385 | path = None |
|
385 | path = None | |
386 | if util.safehasattr(repo, b'_subtoppath'): |
|
386 | if util.safehasattr(repo, b'_subtoppath'): | |
387 | path = repo._subtoppath |
|
387 | path = repo._subtoppath | |
388 | elif push and repo.ui.config(b'paths', b'default-push'): |
|
388 | elif push and repo.ui.config(b'paths', b'default-push'): | |
389 | path = repo.ui.config(b'paths', b'default-push') |
|
389 | path = repo.ui.config(b'paths', b'default-push') | |
390 | elif repo.ui.config(b'paths', b'default'): |
|
390 | elif repo.ui.config(b'paths', b'default'): | |
391 | path = repo.ui.config(b'paths', b'default') |
|
391 | path = repo.ui.config(b'paths', b'default') | |
392 | elif repo.shared(): |
|
392 | elif repo.shared(): | |
393 | # chop off the .hg component to get the default path form. This has |
|
393 | # chop off the .hg component to get the default path form. This has | |
394 | # already run through vfsmod.vfs(..., realpath=True), so it doesn't |
|
394 | # already run through vfsmod.vfs(..., realpath=True), so it doesn't | |
395 | # have problems with 'C:' |
|
395 | # have problems with 'C:' | |
396 | return os.path.dirname(repo.sharedpath) |
|
396 | return os.path.dirname(repo.sharedpath) | |
397 | if path: |
|
397 | if path: | |
398 | # issue5770: 'C:\' and 'C:' are not equivalent paths. The former is |
|
398 | # issue5770: 'C:\' and 'C:' are not equivalent paths. The former is | |
399 | # as expected: an absolute path to the root of the C: drive. The |
|
399 | # as expected: an absolute path to the root of the C: drive. The | |
400 | # latter is a relative path, and works like so: |
|
400 | # latter is a relative path, and works like so: | |
401 | # |
|
401 | # | |
402 | # C:\>cd C:\some\path |
|
402 | # C:\>cd C:\some\path | |
403 | # C:\>D: |
|
403 | # C:\>D: | |
404 | # D:\>python -c "import os; print os.path.abspath('C:')" |
|
404 | # D:\>python -c "import os; print os.path.abspath('C:')" | |
405 | # C:\some\path |
|
405 | # C:\some\path | |
406 | # |
|
406 | # | |
407 | # D:\>python -c "import os; print os.path.abspath('C:relative')" |
|
407 | # D:\>python -c "import os; print os.path.abspath('C:relative')" | |
408 | # C:\some\path\relative |
|
408 | # C:\some\path\relative | |
409 | if util.hasdriveletter(path): |
|
409 | if util.hasdriveletter(path): | |
410 | if len(path) == 2 or path[2:3] not in br'\/': |
|
410 | if len(path) == 2 or path[2:3] not in br'\/': | |
411 | path = os.path.abspath(path) |
|
411 | path = os.path.abspath(path) | |
412 | return path |
|
412 | return path | |
413 |
|
413 | |||
414 | if abort: |
|
414 | if abort: | |
415 | raise error.Abort(_(b"default path for subrepository not found")) |
|
415 | raise error.Abort(_(b"default path for subrepository not found")) | |
416 |
|
416 | |||
417 |
|
417 | |||
418 | def newcommitphase(ui, ctx): |
|
418 | def newcommitphase(ui, ctx): | |
419 | commitphase = phases.newcommitphase(ui) |
|
419 | commitphase = phases.newcommitphase(ui) | |
420 | substate = getattr(ctx, "substate", None) |
|
420 | substate = getattr(ctx, "substate", None) | |
421 | if not substate: |
|
421 | if not substate: | |
422 | return commitphase |
|
422 | return commitphase | |
423 | check = ui.config(b'phases', b'checksubrepos') |
|
423 | check = ui.config(b'phases', b'checksubrepos') | |
424 | if check not in (b'ignore', b'follow', b'abort'): |
|
424 | if check not in (b'ignore', b'follow', b'abort'): | |
425 | raise error.Abort( |
|
425 | raise error.Abort( | |
426 | _(b'invalid phases.checksubrepos configuration: %s') % check |
|
426 | _(b'invalid phases.checksubrepos configuration: %s') % check | |
427 | ) |
|
427 | ) | |
428 | if check == b'ignore': |
|
428 | if check == b'ignore': | |
429 | return commitphase |
|
429 | return commitphase | |
430 | maxphase = phases.public |
|
430 | maxphase = phases.public | |
431 | maxsub = None |
|
431 | maxsub = None | |
432 | for s in sorted(substate): |
|
432 | for s in sorted(substate): | |
433 | sub = ctx.sub(s) |
|
433 | sub = ctx.sub(s) | |
434 | subphase = sub.phase(substate[s][1]) |
|
434 | subphase = sub.phase(substate[s][1]) | |
435 | if maxphase < subphase: |
|
435 | if maxphase < subphase: | |
436 | maxphase = subphase |
|
436 | maxphase = subphase | |
437 | maxsub = s |
|
437 | maxsub = s | |
438 | if commitphase < maxphase: |
|
438 | if commitphase < maxphase: | |
439 | if check == b'abort': |
|
439 | if check == b'abort': | |
440 | raise error.Abort( |
|
440 | raise error.Abort( | |
441 | _( |
|
441 | _( | |
442 | b"can't commit in %s phase" |
|
442 | b"can't commit in %s phase" | |
443 | b" conflicting %s from subrepository %s" |
|
443 | b" conflicting %s from subrepository %s" | |
444 | ) |
|
444 | ) | |
445 | % ( |
|
445 | % ( | |
446 | phases.phasenames[commitphase], |
|
446 | phases.phasenames[commitphase], | |
447 | phases.phasenames[maxphase], |
|
447 | phases.phasenames[maxphase], | |
448 | maxsub, |
|
448 | maxsub, | |
449 | ) |
|
449 | ) | |
450 | ) |
|
450 | ) | |
451 | ui.warn( |
|
451 | ui.warn( | |
452 | _( |
|
452 | _( | |
453 | b"warning: changes are committed in" |
|
453 | b"warning: changes are committed in" | |
454 | b" %s phase from subrepository %s\n" |
|
454 | b" %s phase from subrepository %s\n" | |
455 | ) |
|
455 | ) | |
456 | % (phases.phasenames[maxphase], maxsub) |
|
456 | % (phases.phasenames[maxphase], maxsub) | |
457 | ) |
|
457 | ) | |
458 | return maxphase |
|
458 | return maxphase | |
459 | return commitphase |
|
459 | return commitphase |
@@ -1,1114 +1,1114 b'' | |||||
1 | # templater.py - template expansion for output |
|
1 | # templater.py - template expansion for output | |
2 | # |
|
2 | # | |
3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | """Slightly complicated template engine for commands and hgweb |
|
8 | """Slightly complicated template engine for commands and hgweb | |
9 |
|
9 | |||
10 | This module provides low-level interface to the template engine. See the |
|
10 | This module provides low-level interface to the template engine. See the | |
11 | formatter and cmdutil modules if you are looking for high-level functions |
|
11 | formatter and cmdutil modules if you are looking for high-level functions | |
12 | such as ``cmdutil.rendertemplate(ctx, tmpl)``. |
|
12 | such as ``cmdutil.rendertemplate(ctx, tmpl)``. | |
13 |
|
13 | |||
14 | Internal Data Types |
|
14 | Internal Data Types | |
15 | ------------------- |
|
15 | ------------------- | |
16 |
|
16 | |||
17 | Template keywords and functions take a dictionary of current symbols and |
|
17 | Template keywords and functions take a dictionary of current symbols and | |
18 | resources (a "mapping") and return result. Inputs and outputs must be one |
|
18 | resources (a "mapping") and return result. Inputs and outputs must be one | |
19 | of the following data types: |
|
19 | of the following data types: | |
20 |
|
20 | |||
21 | bytes |
|
21 | bytes | |
22 | a byte string, which is generally a human-readable text in local encoding. |
|
22 | a byte string, which is generally a human-readable text in local encoding. | |
23 |
|
23 | |||
24 | generator |
|
24 | generator | |
25 | a lazily-evaluated byte string, which is a possibly nested generator of |
|
25 | a lazily-evaluated byte string, which is a possibly nested generator of | |
26 | values of any printable types, and will be folded by ``stringify()`` |
|
26 | values of any printable types, and will be folded by ``stringify()`` | |
27 | or ``flatten()``. |
|
27 | or ``flatten()``. | |
28 |
|
28 | |||
29 | None |
|
29 | None | |
30 | sometimes represents an empty value, which can be stringified to ''. |
|
30 | sometimes represents an empty value, which can be stringified to ''. | |
31 |
|
31 | |||
32 | True, False, int, float |
|
32 | True, False, int, float | |
33 | can be stringified as such. |
|
33 | can be stringified as such. | |
34 |
|
34 | |||
35 | wrappedbytes, wrappedvalue |
|
35 | wrappedbytes, wrappedvalue | |
36 | a wrapper for the above printable types. |
|
36 | a wrapper for the above printable types. | |
37 |
|
37 | |||
38 | date |
|
38 | date | |
39 | represents a (unixtime, offset) tuple. |
|
39 | represents a (unixtime, offset) tuple. | |
40 |
|
40 | |||
41 | hybrid |
|
41 | hybrid | |
42 | represents a list/dict of printable values, which can also be converted |
|
42 | represents a list/dict of printable values, which can also be converted | |
43 | to mappings by % operator. |
|
43 | to mappings by % operator. | |
44 |
|
44 | |||
45 | hybriditem |
|
45 | hybriditem | |
46 | represents a scalar printable value, also supports % operator. |
|
46 | represents a scalar printable value, also supports % operator. | |
47 |
|
47 | |||
48 | revslist |
|
48 | revslist | |
49 | represents a list of revision numbers. |
|
49 | represents a list of revision numbers. | |
50 |
|
50 | |||
51 | mappinggenerator, mappinglist |
|
51 | mappinggenerator, mappinglist | |
52 | represents mappings (i.e. a list of dicts), which may have default |
|
52 | represents mappings (i.e. a list of dicts), which may have default | |
53 | output format. |
|
53 | output format. | |
54 |
|
54 | |||
55 | mappingdict |
|
55 | mappingdict | |
56 | represents a single mapping (i.e. a dict), which may have default output |
|
56 | represents a single mapping (i.e. a dict), which may have default output | |
57 | format. |
|
57 | format. | |
58 |
|
58 | |||
59 | mappingnone |
|
59 | mappingnone | |
60 | represents None of Optional[mappable], which will be mapped to an empty |
|
60 | represents None of Optional[mappable], which will be mapped to an empty | |
61 | string by % operation. |
|
61 | string by % operation. | |
62 |
|
62 | |||
63 | mappedgenerator |
|
63 | mappedgenerator | |
64 | a lazily-evaluated list of byte strings, which is e.g. a result of % |
|
64 | a lazily-evaluated list of byte strings, which is e.g. a result of % | |
65 | operation. |
|
65 | operation. | |
66 | """ |
|
66 | """ | |
67 |
|
67 | |||
68 | from __future__ import absolute_import, print_function |
|
68 | from __future__ import absolute_import, print_function | |
69 |
|
69 | |||
70 | import abc |
|
70 | import abc | |
71 | import os |
|
71 | import os | |
72 |
|
72 | |||
73 | from .i18n import _ |
|
73 | from .i18n import _ | |
74 | from .pycompat import getattr |
|
74 | from .pycompat import getattr | |
75 | from . import ( |
|
75 | from . import ( | |
76 | config, |
|
76 | config, | |
77 | encoding, |
|
77 | encoding, | |
78 | error, |
|
78 | error, | |
79 | parser, |
|
79 | parser, | |
80 | pycompat, |
|
80 | pycompat, | |
81 | templatefilters, |
|
81 | templatefilters, | |
82 | templatefuncs, |
|
82 | templatefuncs, | |
83 | templateutil, |
|
83 | templateutil, | |
84 | util, |
|
84 | util, | |
85 | ) |
|
85 | ) | |
86 | from .utils import ( |
|
86 | from .utils import ( | |
87 | resourceutil, |
|
87 | resourceutil, | |
88 | stringutil, |
|
88 | stringutil, | |
89 | ) |
|
89 | ) | |
90 |
|
90 | |||
91 | # template parsing |
|
91 | # template parsing | |
92 |
|
92 | |||
93 | elements = { |
|
93 | elements = { | |
94 | # token-type: binding-strength, primary, prefix, infix, suffix |
|
94 | # token-type: binding-strength, primary, prefix, infix, suffix | |
95 | b"(": (20, None, (b"group", 1, b")"), (b"func", 1, b")"), None), |
|
95 | b"(": (20, None, (b"group", 1, b")"), (b"func", 1, b")"), None), | |
96 | b".": (18, None, None, (b".", 18), None), |
|
96 | b".": (18, None, None, (b".", 18), None), | |
97 | b"%": (15, None, None, (b"%", 15), None), |
|
97 | b"%": (15, None, None, (b"%", 15), None), | |
98 | b"|": (15, None, None, (b"|", 15), None), |
|
98 | b"|": (15, None, None, (b"|", 15), None), | |
99 | b"*": (5, None, None, (b"*", 5), None), |
|
99 | b"*": (5, None, None, (b"*", 5), None), | |
100 | b"/": (5, None, None, (b"/", 5), None), |
|
100 | b"/": (5, None, None, (b"/", 5), None), | |
101 | b"+": (4, None, None, (b"+", 4), None), |
|
101 | b"+": (4, None, None, (b"+", 4), None), | |
102 | b"-": (4, None, (b"negate", 19), (b"-", 4), None), |
|
102 | b"-": (4, None, (b"negate", 19), (b"-", 4), None), | |
103 | b"=": (3, None, None, (b"keyvalue", 3), None), |
|
103 | b"=": (3, None, None, (b"keyvalue", 3), None), | |
104 | b",": (2, None, None, (b"list", 2), None), |
|
104 | b",": (2, None, None, (b"list", 2), None), | |
105 | b")": (0, None, None, None, None), |
|
105 | b")": (0, None, None, None, None), | |
106 | b"integer": (0, b"integer", None, None, None), |
|
106 | b"integer": (0, b"integer", None, None, None), | |
107 | b"symbol": (0, b"symbol", None, None, None), |
|
107 | b"symbol": (0, b"symbol", None, None, None), | |
108 | b"string": (0, b"string", None, None, None), |
|
108 | b"string": (0, b"string", None, None, None), | |
109 | b"template": (0, b"template", None, None, None), |
|
109 | b"template": (0, b"template", None, None, None), | |
110 | b"end": (0, None, None, None, None), |
|
110 | b"end": (0, None, None, None, None), | |
111 | } |
|
111 | } | |
112 |
|
112 | |||
113 |
|
113 | |||
114 | def tokenize(program, start, end, term=None): |
|
114 | def tokenize(program, start, end, term=None): | |
115 | """Parse a template expression into a stream of tokens, which must end |
|
115 | """Parse a template expression into a stream of tokens, which must end | |
116 | with term if specified""" |
|
116 | with term if specified""" | |
117 | pos = start |
|
117 | pos = start | |
118 | program = pycompat.bytestr(program) |
|
118 | program = pycompat.bytestr(program) | |
119 | while pos < end: |
|
119 | while pos < end: | |
120 | c = program[pos] |
|
120 | c = program[pos] | |
121 | if c.isspace(): # skip inter-token whitespace |
|
121 | if c.isspace(): # skip inter-token whitespace | |
122 | pass |
|
122 | pass | |
123 | elif c in b"(=,).%|+-*/": # handle simple operators |
|
123 | elif c in b"(=,).%|+-*/": # handle simple operators | |
124 | yield (c, None, pos) |
|
124 | yield (c, None, pos) | |
125 | elif c in b'"\'': # handle quoted templates |
|
125 | elif c in b'"\'': # handle quoted templates | |
126 | s = pos + 1 |
|
126 | s = pos + 1 | |
127 | data, pos = _parsetemplate(program, s, end, c) |
|
127 | data, pos = _parsetemplate(program, s, end, c) | |
128 | yield (b'template', data, s) |
|
128 | yield (b'template', data, s) | |
129 | pos -= 1 |
|
129 | pos -= 1 | |
130 | elif c == b'r' and program[pos : pos + 2] in (b"r'", b'r"'): |
|
130 | elif c == b'r' and program[pos : pos + 2] in (b"r'", b'r"'): | |
131 | # handle quoted strings |
|
131 | # handle quoted strings | |
132 | c = program[pos + 1] |
|
132 | c = program[pos + 1] | |
133 | s = pos = pos + 2 |
|
133 | s = pos = pos + 2 | |
134 | while pos < end: # find closing quote |
|
134 | while pos < end: # find closing quote | |
135 | d = program[pos] |
|
135 | d = program[pos] | |
136 | if d == b'\\': # skip over escaped characters |
|
136 | if d == b'\\': # skip over escaped characters | |
137 | pos += 2 |
|
137 | pos += 2 | |
138 | continue |
|
138 | continue | |
139 | if d == c: |
|
139 | if d == c: | |
140 | yield (b'string', program[s:pos], s) |
|
140 | yield (b'string', program[s:pos], s) | |
141 | break |
|
141 | break | |
142 | pos += 1 |
|
142 | pos += 1 | |
143 | else: |
|
143 | else: | |
144 | raise error.ParseError(_(b"unterminated string"), s) |
|
144 | raise error.ParseError(_(b"unterminated string"), s) | |
145 | elif c.isdigit(): |
|
145 | elif c.isdigit(): | |
146 | s = pos |
|
146 | s = pos | |
147 | while pos < end: |
|
147 | while pos < end: | |
148 | d = program[pos] |
|
148 | d = program[pos] | |
149 | if not d.isdigit(): |
|
149 | if not d.isdigit(): | |
150 | break |
|
150 | break | |
151 | pos += 1 |
|
151 | pos += 1 | |
152 | yield (b'integer', program[s:pos], s) |
|
152 | yield (b'integer', program[s:pos], s) | |
153 | pos -= 1 |
|
153 | pos -= 1 | |
154 | elif ( |
|
154 | elif ( | |
155 | c == b'\\' |
|
155 | c == b'\\' | |
156 | and program[pos : pos + 2] in (br"\'", br'\"') |
|
156 | and program[pos : pos + 2] in (br"\'", br'\"') | |
157 | or c == b'r' |
|
157 | or c == b'r' | |
158 | and program[pos : pos + 3] in (br"r\'", br'r\"') |
|
158 | and program[pos : pos + 3] in (br"r\'", br'r\"') | |
159 | ): |
|
159 | ): | |
160 | # handle escaped quoted strings for compatibility with 2.9.2-3.4, |
|
160 | # handle escaped quoted strings for compatibility with 2.9.2-3.4, | |
161 | # where some of nested templates were preprocessed as strings and |
|
161 | # where some of nested templates were preprocessed as strings and | |
162 | # then compiled. therefore, \"...\" was allowed. (issue4733) |
|
162 | # then compiled. therefore, \"...\" was allowed. (issue4733) | |
163 | # |
|
163 | # | |
164 | # processing flow of _evalifliteral() at 5ab28a2e9962: |
|
164 | # processing flow of _evalifliteral() at 5ab28a2e9962: | |
165 | # outer template string -> stringify() -> compiletemplate() |
|
165 | # outer template string -> stringify() -> compiletemplate() | |
166 | # ------------------------ ------------ ------------------ |
|
166 | # ------------------------ ------------ ------------------ | |
167 | # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}] |
|
167 | # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}] | |
168 | # ~~~~~~~~ |
|
168 | # ~~~~~~~~ | |
169 | # escaped quoted string |
|
169 | # escaped quoted string | |
170 | if c == b'r': |
|
170 | if c == b'r': | |
171 | pos += 1 |
|
171 | pos += 1 | |
172 | token = b'string' |
|
172 | token = b'string' | |
173 | else: |
|
173 | else: | |
174 | token = b'template' |
|
174 | token = b'template' | |
175 | quote = program[pos : pos + 2] |
|
175 | quote = program[pos : pos + 2] | |
176 | s = pos = pos + 2 |
|
176 | s = pos = pos + 2 | |
177 | while pos < end: # find closing escaped quote |
|
177 | while pos < end: # find closing escaped quote | |
178 | if program.startswith(b'\\\\\\', pos, end): |
|
178 | if program.startswith(b'\\\\\\', pos, end): | |
179 | pos += 4 # skip over double escaped characters |
|
179 | pos += 4 # skip over double escaped characters | |
180 | continue |
|
180 | continue | |
181 | if program.startswith(quote, pos, end): |
|
181 | if program.startswith(quote, pos, end): | |
182 | # interpret as if it were a part of an outer string |
|
182 | # interpret as if it were a part of an outer string | |
183 | data = parser.unescapestr(program[s:pos]) |
|
183 | data = parser.unescapestr(program[s:pos]) | |
184 | if token == b'template': |
|
184 | if token == b'template': | |
185 | data = _parsetemplate(data, 0, len(data))[0] |
|
185 | data = _parsetemplate(data, 0, len(data))[0] | |
186 | yield (token, data, s) |
|
186 | yield (token, data, s) | |
187 | pos += 1 |
|
187 | pos += 1 | |
188 | break |
|
188 | break | |
189 | pos += 1 |
|
189 | pos += 1 | |
190 | else: |
|
190 | else: | |
191 | raise error.ParseError(_(b"unterminated string"), s) |
|
191 | raise error.ParseError(_(b"unterminated string"), s) | |
192 | elif c.isalnum() or c in b'_': |
|
192 | elif c.isalnum() or c in b'_': | |
193 | s = pos |
|
193 | s = pos | |
194 | pos += 1 |
|
194 | pos += 1 | |
195 | while pos < end: # find end of symbol |
|
195 | while pos < end: # find end of symbol | |
196 | d = program[pos] |
|
196 | d = program[pos] | |
197 | if not (d.isalnum() or d == b"_"): |
|
197 | if not (d.isalnum() or d == b"_"): | |
198 | break |
|
198 | break | |
199 | pos += 1 |
|
199 | pos += 1 | |
200 | sym = program[s:pos] |
|
200 | sym = program[s:pos] | |
201 | yield (b'symbol', sym, s) |
|
201 | yield (b'symbol', sym, s) | |
202 | pos -= 1 |
|
202 | pos -= 1 | |
203 | elif c == term: |
|
203 | elif c == term: | |
204 | yield (b'end', None, pos) |
|
204 | yield (b'end', None, pos) | |
205 | return |
|
205 | return | |
206 | else: |
|
206 | else: | |
207 | raise error.ParseError(_(b"syntax error"), pos) |
|
207 | raise error.ParseError(_(b"syntax error"), pos) | |
208 | pos += 1 |
|
208 | pos += 1 | |
209 | if term: |
|
209 | if term: | |
210 | raise error.ParseError(_(b"unterminated template expansion"), start) |
|
210 | raise error.ParseError(_(b"unterminated template expansion"), start) | |
211 | yield (b'end', None, pos) |
|
211 | yield (b'end', None, pos) | |
212 |
|
212 | |||
213 |
|
213 | |||
def _parsetemplate(tmpl, start, stop, quote=b''):
    r"""Parse a template string into a list of (type, value) nodes

    Returns ``(parsedlist, endpos)``. Scanning stops early at an unescaped
    ``quote`` character when one is given (used for nested quoted templates).

    >>> _parsetemplate(b'foo{bar}"baz', 0, 12)
    ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
    >>> _parsetemplate(b'foo{bar}"baz', 0, 12, quote=b'"')
    ([('string', 'foo'), ('symbol', 'bar')], 9)
    >>> _parsetemplate(b'foo"{bar}', 0, 9, quote=b'"')
    ([('string', 'foo')], 4)
    >>> _parsetemplate(br'foo\"bar"baz', 0, 12, quote=b'"')
    ([('string', 'foo"'), ('string', 'bar')], 9)
    >>> _parsetemplate(br'foo\\"bar', 0, 10, quote=b'"')
    ([('string', 'foo\\')], 6)
    """
    nodes = []
    for kind, data, pos in _scantemplate(tmpl, start, stop, quote):
        if kind == b'end':
            # scanner guarantees a final 'end' token on success
            return nodes, pos
        elif kind == b'string':
            nodes.append((kind, data))
        elif kind == b'template':
            # already a parsed sub-tree; insert as-is
            nodes.append(data)
        else:
            raise error.ProgrammingError(b'unexpected type: %s' % kind)
    raise error.ProgrammingError(b'unterminated scanning of template')
238 |
|
238 | |||
239 |
|
239 | |||
def scantemplate(tmpl, raw=False):
    r"""Scan (type, start, end) positions of outermost elements in template

    If raw=True, a backslash is not taken as an escape character just like
    r'' string in Python. Note that this is different from r'' literal in
    template in that no template fragment can appear in r'', e.g. r'{foo}'
    is a literal '{foo}', but ('{foo}', raw=True) is a template expression
    'foo'.

    >>> list(scantemplate(b'foo{bar}"baz'))
    [('string', 0, 3), ('template', 3, 8), ('string', 8, 12)]
    >>> list(scantemplate(b'outer{"inner"}outer'))
    [('string', 0, 5), ('template', 5, 14), ('string', 14, 19)]
    >>> list(scantemplate(b'foo\\{escaped}'))
    [('string', 0, 5), ('string', 5, 13)]
    >>> list(scantemplate(b'foo\\{escaped}', raw=True))
    [('string', 0, 4), ('template', 4, 13)]
    """
    # each scanned token marks the end position of the previous one, so
    # hold one element back until its successor (or 'end') arrives
    pending = None
    for kind, _data, pos in _scantemplate(tmpl, 0, len(tmpl), raw=raw):
        if pending is not None:
            yield pending + (pos,)
        if kind == b'end':
            return
        pending = (kind, pos)
    raise error.ProgrammingError(b'unterminated scanning of template')
267 |
|
267 | |||
268 |
|
268 | |||
def _scantemplate(tmpl, start, stop, quote=b'', raw=False):
    """Parse template string into chunks of strings and template expressions"""
    # a chunk ends at '{' (template expression) or, inside a quoted
    # template, at the closing quote character
    sepchars = b'{' + quote
    unescape = [parser.unescapestr, pycompat.identity][raw]
    pos = start
    exprparser = parser.parser(elements)
    try:
        while pos < stop:
            # nearest separator; (n < 0, n) sorts "not found" (-1) last
            n = min(
                (tmpl.find(c, pos, stop) for c in pycompat.bytestr(sepchars)),
                key=lambda n: (n < 0, n),
            )
            if n < 0:
                # no more separators: the rest is one literal string
                yield (b'string', unescape(tmpl[pos:stop]), pos)
                pos = stop
                break
            sep = tmpl[n : n + 1]
            nbackslash = 0  # count leading backslashes
            if not raw:
                nbackslash = (n - pos) - len(tmpl[pos:n].rstrip(b'\\'))
            if nbackslash % 2 == 1:
                # escaped (e.g. '\{', '\\\{', but not '\\{')
                yield (b'string', unescape(tmpl[pos : n - 1]) + sep, pos)
                pos = n + 1
                continue
            if n > pos:
                # literal text preceding the separator
                yield (b'string', unescape(tmpl[pos:n]), pos)
            if sep == quote:
                yield (b'end', None, n + 1)
                return

            parseres, pos = exprparser.parse(tokenize(tmpl, n + 1, stop, b'}'))
            if not tmpl.startswith(b'}', pos):
                raise error.ParseError(_(b"invalid token"), pos)
            yield (b'template', parseres, n)
            pos += 1

        if quote:
            raise error.ParseError(_(b"unterminated string"), start)
    except error.ParseError as inst:
        # decorate the exception with a caret pointing at the failure
        _addparseerrorhint(inst, tmpl)
        raise
    yield (b'end', None, pos)
312 |
|
312 | |||
313 |
|
313 | |||
def _addparseerrorhint(inst, tmpl):
    """Attach a caret-style hint to a ParseError pointing at its location"""
    if len(inst.args) <= 1:
        return  # no location
    loc = inst.args[1]
    # Offset the caret location by the number of newlines before the
    # location of the error, since we will replace one-char newlines
    # with the two-char literal r'\n'.
    offset = tmpl[:loc].count(b'\n')
    rendered = tmpl.replace(b'\n', br'\n')
    # We want the caret to point to the place in the template that
    # failed to parse, but in a hint we get a open paren at the
    # start. Therefore, we print "loc + 1" spaces (instead of "loc")
    # to line up the caret with the location of the error.
    caretindent = loc + 1 + offset
    inst.hint = rendered + b'\n' + b' ' * caretindent + b'^ ' + _(b'here')
328 |
|
328 | |||
329 |
|
329 | |||
330 | def _unnesttemplatelist(tree): |
|
330 | def _unnesttemplatelist(tree): | |
331 | """Expand list of templates to node tuple |
|
331 | """Expand list of templates to node tuple | |
332 |
|
332 | |||
333 | >>> def f(tree): |
|
333 | >>> def f(tree): | |
334 | ... print(pycompat.sysstr(prettyformat(_unnesttemplatelist(tree)))) |
|
334 | ... print(pycompat.sysstr(prettyformat(_unnesttemplatelist(tree)))) | |
335 | >>> f((b'template', [])) |
|
335 | >>> f((b'template', [])) | |
336 | (string '') |
|
336 | (string '') | |
337 | >>> f((b'template', [(b'string', b'foo')])) |
|
337 | >>> f((b'template', [(b'string', b'foo')])) | |
338 | (string 'foo') |
|
338 | (string 'foo') | |
339 | >>> f((b'template', [(b'string', b'foo'), (b'symbol', b'rev')])) |
|
339 | >>> f((b'template', [(b'string', b'foo'), (b'symbol', b'rev')])) | |
340 | (template |
|
340 | (template | |
341 | (string 'foo') |
|
341 | (string 'foo') | |
342 | (symbol 'rev')) |
|
342 | (symbol 'rev')) | |
343 | >>> f((b'template', [(b'symbol', b'rev')])) # template(rev) -> str |
|
343 | >>> f((b'template', [(b'symbol', b'rev')])) # template(rev) -> str | |
344 | (template |
|
344 | (template | |
345 | (symbol 'rev')) |
|
345 | (symbol 'rev')) | |
346 | >>> f((b'template', [(b'template', [(b'string', b'foo')])])) |
|
346 | >>> f((b'template', [(b'template', [(b'string', b'foo')])])) | |
347 | (string 'foo') |
|
347 | (string 'foo') | |
348 | """ |
|
348 | """ | |
349 | if not isinstance(tree, tuple): |
|
349 | if not isinstance(tree, tuple): | |
350 | return tree |
|
350 | return tree | |
351 | op = tree[0] |
|
351 | op = tree[0] | |
352 | if op != b'template': |
|
352 | if op != b'template': | |
353 | return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:]) |
|
353 | return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:]) | |
354 |
|
354 | |||
355 | assert len(tree) == 2 |
|
355 | assert len(tree) == 2 | |
356 | xs = tuple(_unnesttemplatelist(x) for x in tree[1]) |
|
356 | xs = tuple(_unnesttemplatelist(x) for x in tree[1]) | |
357 | if not xs: |
|
357 | if not xs: | |
358 | return (b'string', b'') # empty template "" |
|
358 | return (b'string', b'') # empty template "" | |
359 | elif len(xs) == 1 and xs[0][0] == b'string': |
|
359 | elif len(xs) == 1 and xs[0][0] == b'string': | |
360 | return xs[0] # fast path for string with no template fragment "x" |
|
360 | return xs[0] # fast path for string with no template fragment "x" | |
361 | else: |
|
361 | else: | |
362 | return (op,) + xs |
|
362 | return (op,) + xs | |
363 |
|
363 | |||
364 |
|
364 | |||
def parse(tmpl):
    """Parse template string into tree"""
    nodes, pos = _parsetemplate(tmpl, 0, len(tmpl))
    # a top-level (unquoted) template has no terminator, so the parser
    # must consume the whole string
    assert pos == len(tmpl), b'unquoted template should be consumed'
    return _unnesttemplatelist((b'template', nodes))
370 |
|
370 | |||
371 |
|
371 | |||
def parseexpr(expr):
    """Parse a template expression into tree

    >>> parseexpr(b'"foo"')
    ('string', 'foo')
    >>> parseexpr(b'foo(bar)')
    ('func', ('symbol', 'foo'), ('symbol', 'bar'))
    >>> parseexpr(b'foo(')
    Traceback (most recent call last):
      ...
    ParseError: ('not a prefix: end', 4)
    >>> parseexpr(b'"foo" "bar"')
    Traceback (most recent call last):
      ...
    ParseError: ('invalid token', 7)
    """
    try:
        return _parseexpr(expr)
    except error.ParseError as inst:
        # point the user at the offending position before propagating
        _addparseerrorhint(inst, expr)
        raise
393 |
|
393 | |||
394 |
|
394 | |||
def _parseexpr(expr):
    """Parse an expression string into a tree, without hinting errors"""
    exprparser = parser.parser(elements)
    tree, pos = exprparser.parse(tokenize(expr, 0, len(expr)))
    if pos != len(expr):
        # trailing garbage after a complete expression
        raise error.ParseError(_(b'invalid token'), pos)
    return _unnesttemplatelist(tree)
401 |
|
401 | |||
402 |
|
402 | |||
def prettyformat(tree):
    """Render a parsed tree as an indented debug string"""
    leafnodes = (b'integer', b'string', b'symbol')
    return parser.prettyformat(tree, leafnodes)
405 |
|
405 | |||
406 |
|
406 | |||
def compileexp(exp, context, curmethods):
    """Compile parsed template tree to (func, data) pair"""
    if not exp:
        raise error.ParseError(_(b"missing argument"))
    # dispatch on the node's operator tag
    return curmethods[exp[0]](exp, context)
413 |
|
413 | |||
414 |
|
414 | |||
415 | # template evaluation |
|
415 | # template evaluation | |
416 |
|
416 | |||
417 |
|
417 | |||
def getsymbol(exp):
    """Extract the name from a 'symbol' node, or raise ParseError"""
    if exp[0] != b'symbol':
        raise error.ParseError(_(b"expected a symbol, got '%s'") % exp[0])
    return exp[1]
422 |
|
422 | |||
423 |
|
423 | |||
def getlist(x):
    """Flatten a left-nested 'list' node chain into a Python list

    A falsy node yields []; a non-'list' node is a single-element list.
    """
    items = []
    # 'list' nodes nest on the left: (list, (list, a, b), c) -> [a, b, c]
    while x and x[0] == b'list':
        items.append(x[2])
        x = x[1]
    if x:
        items.append(x)
    items.reverse()
    return items
430 |
|
430 | |||
431 |
|
431 | |||
def gettemplate(exp, context):
    """Compile given template tree or load named template from map file;
    returns (func, data) pair"""
    op = exp[0]
    if op in (b'template', b'string'):
        return compileexp(exp, context, methods)
    if op == b'symbol':
        # unlike runsymbol(), here 'symbol' is always taken as template name
        # even if it exists in mapping. this allows us to override mapping
        # by web templates, e.g. 'changelogtag' is redefined in map file.
        return context._load(exp[1])
    raise error.ParseError(_(b"expected template specifier"))
443 |
|
443 | |||
444 |
|
444 | |||
def _runrecursivesymbol(context, mapping, key):
    """Placeholder evaluator aborting on a self-referencing template"""
    raise error.Abort(_(b"recursive reference '%s' in template") % key)
447 |
|
447 | |||
448 |
|
448 | |||
def buildtemplate(exp, context):
    """Compile a 'template' node: each operand compiled with top-level rules"""
    compiled = [compileexp(e, context, methods) for e in exp[1:]]
    return (templateutil.runtemplate, compiled)
452 |
|
452 | |||
453 |
|
453 | |||
def buildfilter(exp, context):
    """Compile '{x|name}': 'name' may be a filter or a function"""
    name = getsymbol(exp[2])
    # filters take precedence over same-named functions
    if name in context._filters:
        filt = context._filters[name]
        arg = compileexp(exp[1], context, methods)
        return (templateutil.runfilter, (arg, filt))
    if name in context._funcs:
        func = context._funcs[name]
        args = _buildfuncargs(exp[1], context, methods, name, func._argspec)
        return (func, args)
    raise error.ParseError(_(b"unknown function '%s'") % name)
465 |
|
465 | |||
466 |
|
466 | |||
def buildmap(exp, context):
    """Compile '{data % template}' mapping expression"""
    darg = compileexp(exp[1], context, methods)
    targ = gettemplate(exp[2], context)
    return (templateutil.runmap, (darg, targ))
471 |
|
471 | |||
472 |
|
472 | |||
def buildmember(exp, context):
    """Compile '{data.member}' attribute access"""
    darg = compileexp(exp[1], context, methods)
    memb = getsymbol(exp[2])
    return (templateutil.runmember, (darg, memb))
477 |
|
477 | |||
478 |
|
478 | |||
def buildnegate(exp, context):
    """Compile unary '-x'; the operand is an inner expression"""
    arg = compileexp(exp[1], context, exprmethods)
    return (templateutil.runnegate, arg)
482 |
|
482 | |||
483 |
|
483 | |||
def buildarithmetic(exp, context, func):
    """Compile a binary arithmetic node; 'func' combines the two values"""
    left = compileexp(exp[1], context, exprmethods)
    right = compileexp(exp[2], context, exprmethods)
    return (templateutil.runarithmetic, (func, left, right))
488 |
|
488 | |||
489 |
|
489 | |||
def buildfunc(exp, context):
    """Compile '{name(args)}': 'name' may be a function or a filter"""
    name = getsymbol(exp[1])
    # functions take precedence over same-named filters here
    if name in context._funcs:
        func = context._funcs[name]
        args = _buildfuncargs(exp[2], context, exprmethods, name, func._argspec)
        return (func, args)
    if name in context._filters:
        args = _buildfuncargs(exp[2], context, exprmethods, name, argspec=None)
        if len(args) != 1:
            raise error.ParseError(_(b"filter %s expects one argument") % name)
        filt = context._filters[name]
        return (templateutil.runfilter, (args[0], filt))
    raise error.ParseError(_(b"unknown function '%s'") % name)
503 |
|
503 | |||
504 |
|
504 | |||
def _buildfuncargs(exp, context, curmethods, funcname, argspec):
    """Compile parsed tree of function arguments into list or dict of
    (func, data) pairs

    >>> context = engine(lambda t: (templateutil.runsymbol, t))
    >>> def fargs(expr, argspec):
    ...     x = _parseexpr(expr)
    ...     n = getsymbol(x[1])
    ...     return _buildfuncargs(x[2], context, exprmethods, n, argspec)
    >>> list(fargs(b'a(l=1, k=2)', b'k l m').keys())
    ['l', 'k']
    >>> args = fargs(b'a(opts=1, k=2)', b'**opts')
    >>> list(args.keys()), list(args[b'opts'].keys())
    (['opts'], ['opts', 'k'])
    """

    def compiledict(xs):
        # compile each named argument, preserving insertion order
        return util.sortdict(
            (k, compileexp(x, context, curmethods))
            for k, x in pycompat.iteritems(xs)
        )

    def compilelist(xs):
        return [compileexp(x, context, curmethods) for x in xs]

    if not argspec:
        # filter or function with no argspec: return list of positional args
        return compilelist(getlist(exp))

    # function with argspec: return dict of named args
    _poskeys, varkey, _keys, optkey = argspec = parser.splitargspec(argspec)
    treeargs = parser.buildargsdict(
        getlist(exp),
        funcname,
        argspec,
        keyvaluenode=b'keyvalue',
        keynode=b'symbol',
    )
    compargs = util.sortdict()
    if varkey:
        # *varargs bucket: compiled as a positional list
        compargs[varkey] = compilelist(treeargs.pop(varkey))
    if optkey:
        # **kwargs bucket: compiled as a nested dict
        compargs[optkey] = compiledict(treeargs.pop(optkey))
    compargs.update(compiledict(treeargs))
    return compargs
550 |
|
550 | |||
551 |
|
551 | |||
def buildkeyvaluepair(exp, content):
    """Reject 'key=value' outside of function-argument position"""
    raise error.ParseError(_(b"can't use a key-value pair in this context"))
554 |
|
554 | |||
555 |
|
555 | |||
def buildlist(exp, context):
    """Reject a bare comma-separated list outside of a function call"""
    raise error.ParseError(
        _(b"can't use a list in this context"),
        hint=_(b'check place of comma and parens'),
    )
561 |
|
561 | |||
562 |
|
562 | |||
# methods to interpret function arguments or inner expressions (e.g. {_(x)})
exprmethods = {
    b"integer": lambda e, c: (templateutil.runinteger, e[1]),
    b"string": lambda e, c: (templateutil.runstring, e[1]),
    b"symbol": lambda e, c: (templateutil.runsymbol, e[1]),
    b"template": buildtemplate,
    b"group": lambda e, c: compileexp(e[1], c, exprmethods),
    b".": buildmember,
    b"|": buildfilter,
    b"%": buildmap,
    b"func": buildfunc,
    b"keyvalue": buildkeyvaluepair,
    b"list": buildlist,
    b"+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
    b"-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
    b"negate": buildnegate,
    b"*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
    # floor division, matching integer-only template arithmetic
    b"/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
}
582 |
|
582 | |||
583 | # methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"}) |
|
583 | # methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"}) | |
584 | methods = exprmethods.copy() |
|
584 | methods = exprmethods.copy() | |
585 | methods[b"integer"] = exprmethods[b"symbol"] # '{1}' as variable |
|
585 | methods[b"integer"] = exprmethods[b"symbol"] # '{1}' as variable | |
586 |
|
586 | |||
587 |
|
587 | |||
class _aliasrules(parser.basealiasrules):
    """Parsing and expansion rule set of template aliases"""

    _section = _(b'template alias')
    _parse = staticmethod(_parseexpr)

    @staticmethod
    def _trygetfunc(tree):
        """Return (name, args) if tree is func(...) or ...|filter; otherwise
        None"""
        op = tree[0]
        if op == b'func' and tree[1][0] == b'symbol':
            # function call: name taken from the symbol node, args from list
            return tree[1][1], getlist(tree[2])
        if op == b'|' and tree[2][0] == b'symbol':
            # filter application: treat 'arg|f' as 'f(arg)'
            return tree[2][1], [tree[1]]
        return None
602 |
|
602 | |||
603 |
|
603 | |||
def expandaliases(tree, aliases):
    """Return a new tree in which the given aliases are expanded"""
    return _aliasrules.expand(_aliasrules.buildmap(aliases), tree)
608 |
|
608 | |||
609 |
|
609 | |||
610 | # template engine |
|
610 | # template engine | |
611 |
|
611 | |||
612 |
|
612 | |||
def unquotestring(s):
    """Strip one matching pair of surrounding quotes, if present.

    Returns *s* unchanged when it is too short to be quoted or when the
    first and last characters are not the same quote character.
    """
    if len(s) >= 2 and s[0] == s[-1] and s[0] in b"'\"":
        return s[1:-1]
    return s
618 |
|
618 | |||
619 |
|
619 | |||
class resourcemapper(object):  # pytype: disable=ignored-metaclass
    """Mapper of internal template resources"""

    # py2-style metaclass declaration kept as in the original source
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def availablekeys(self, mapping):
        """Return a set of available resource keys based on the given mapping"""

    @abc.abstractmethod
    def knownkeys(self):
        """Return a set of supported resource keys"""

    @abc.abstractmethod
    def lookup(self, mapping, key):
        """Return a resource for the key if available; otherwise None"""

    @abc.abstractmethod
    def populatemap(self, context, origmapping, newmapping):
        """Return a dict of additional mapping items which should be paired
        with the given new mapping"""
641 |
|
641 | |||
642 |
|
642 | |||
class nullresourcemapper(resourcemapper):
    """Resource mapper that exposes no resources at all."""

    def availablekeys(self, mapping):
        return set()

    def knownkeys(self):
        return set()

    def lookup(self, mapping, key):
        return None

    def populatemap(self, context, origmapping, newmapping):
        return {}
655 |
|
655 | |||
656 |
|
656 | |||
class engine(object):
    '''template expansion engine.

    template expansion works like this. a map file contains key=value
    pairs. if value is quoted, it is treated as string. otherwise, it
    is treated as name of template file.

    templater is asked to expand a key in map. it looks up key, and
    looks for strings like this: {foo}. it expands {foo} by looking up
    foo in map, and substituting it. expansion is recursive: it stops
    when there is no more {foo} to replace.

    expansion also allows formatting and filtering.

    format uses key to expand each item in list. syntax is
    {key%format}.

    filter uses function to transform value. syntax is
    {key|filter1|filter2|...}.'''

    def __init__(self, loader, filters=None, defaults=None, resources=None):
        self._loader = loader
        # keep the caller's dict objects when provided, so later external
        # mutation (e.g. via templater._filters) remains visible here
        self._filters = {} if filters is None else filters
        self._funcs = templatefuncs.funcs  # make this a parameter if needed
        self._defaults = {} if defaults is None else defaults
        self._resources = (
            nullresourcemapper() if resources is None else resources
        )
        self._cache = {}  # key: (func, data)
        self._tmplcache = {}  # literal template: (func, data)

    def overlaymap(self, origmapping, newmapping):
        """Create combined mapping from the original mapping and partial
        mapping to override the original"""
        # do not copy symbols which overrides the defaults depending on
        # new resources, so the defaults will be re-evaluated (issue5612)
        knownres = self._resources.knownkeys()
        newres = self._resources.availablekeys(newmapping)
        combined = {}
        for k, v in pycompat.iteritems(origmapping):
            # resource keys are not symbols per self.symbol(); keep those.
            # a symbol is kept only if its default doesn't depend on any
            # newly-available resource
            if k in knownres or newres.isdisjoint(self._defaultrequires(k)):
                combined[k] = v
        combined.update(newmapping)
        combined.update(
            self._resources.populatemap(self, origmapping, newmapping)
        )
        return combined

    def _defaultrequires(self, key):
        """Resource keys required by the specified default symbol function"""
        v = self._defaults.get(key)
        if v is None or not callable(v):
            return ()
        return getattr(v, '_requires', ())

    def symbol(self, mapping, key):
        """Resolve symbol to value or function; None if nothing found"""
        v = None
        if key not in self._resources.knownkeys():
            # not a resource key; try the caller-provided mapping first
            v = mapping.get(key)
        if v is None:
            v = self._defaults.get(key)
        return v

    def availableresourcekeys(self, mapping):
        """Return a set of available resource keys based on the given mapping"""
        return self._resources.availablekeys(mapping)

    def knownresourcekeys(self):
        """Return a set of supported resource keys"""
        return self._resources.knownkeys()

    def resource(self, mapping, key):
        """Return internal data (e.g. cache) used for keyword/function
        evaluation"""
        v = self._resources.lookup(mapping, key)
        if v is None:
            raise templateutil.ResourceUnavailable(
                _(b'template resource not available: %s') % key
            )
        return v

    def _load(self, t):
        '''load, parse, and cache a template'''
        if t not in self._cache:
            x = self._loader(t)
            # put poison to cut recursion while compiling 't'
            self._cache[t] = (_runrecursivesymbol, t)
            try:
                self._cache[t] = compileexp(x, self, methods)
            except:  # re-raises
                del self._cache[t]
                raise
        return self._cache[t]

    def _parse(self, tmpl):
        """Parse and cache a literal template"""
        if tmpl not in self._tmplcache:
            self._tmplcache[tmpl] = compileexp(parse(tmpl), self, methods)
        return self._tmplcache[tmpl]

    def preload(self, t):
        """Load, parse, and cache the specified template if available"""
        try:
            self._load(t)
        except templateutil.TemplateNotFound:
            return False
        return True

    def process(self, t, mapping):
        '''Perform expansion. t is name of map element to expand.
        mapping contains added elements for use during expansion. Is a
        generator.'''
        func, data = self._load(t)
        return self._expand(func, data, mapping)

    def expand(self, tmpl, mapping):
        """Perform expansion over a literal template

        No user aliases will be expanded since this is supposed to be called
        with an internal template string.
        """
        func, data = self._parse(tmpl)
        return self._expand(func, data, mapping)

    def _expand(self, func, data, mapping):
        # populate additional items only if they don't exist in the given
        # mapping. this is slightly different from overlaymap() because the
        # initial 'revcache' may contain pre-computed items.
        extramapping = self._resources.populatemap(self, {}, mapping)
        if extramapping:
            extramapping.update(mapping)
            mapping = extramapping
        return templateutil.flatten(self, mapping, func(self, mapping, data))
800 |
|
800 | |||
801 |
|
801 | |||
def stylelist():
    """Return a comma-separated, sorted listing of available styles.

    Styles are discovered as 'map-cmdline.<name>' files in the template
    directory; '.orig'/'.rej' leftovers are ignored.
    """
    path = templatedir()
    if not path:
        return _(b'no templates found, try `hg debuginstall` for more info')
    styles = []
    for name in os.listdir(path):
        parts = name.split(b".")
        if parts[-1] in (b'orig', b'rej'):
            continue
        if parts[0] == b"map-cmdline":
            styles.append(parts[1])
    return b", ".join(sorted(styles))
815 |
|
815 | |||
816 |
|
816 | |||
def _readmapfile(mapfile):
    """Load template elements from the given map file"""
    if not os.path.exists(mapfile):
        raise error.Abort(
            _(b"style '%s' not found") % mapfile,
            hint=_(b"available styles: %s") % stylelist(),
        )

    base = os.path.dirname(mapfile)
    conf = config.config()

    def include(rel, remap, sections):
        # resolve %include targets against the map file's directory first,
        # then against the installed template directory
        for d in (base, templatedir()):
            if d is None:
                continue
            abspath = os.path.normpath(os.path.join(d, rel))
            if os.path.isfile(abspath):
                data = util.posixfile(abspath, b'rb').read()
                conf.parse(
                    abspath,
                    data,
                    sections=sections,
                    remap=remap,
                    include=include,
                )
                break

    data = util.posixfile(mapfile, b'rb').read()
    conf.parse(mapfile, data, remap={b'': b'templates'}, include=include)

    cache = {}
    tmap = {}
    aliases = []

    val = conf.get(b'templates', b'__base__')
    if val and val[0] not in b"'\"":
        # treat as a pointer to a base class for this style
        path = util.normpath(os.path.join(base, val))

        # fallback check in template paths
        if not os.path.exists(path):
            d = templatedir()
            if d is not None:
                p2 = util.normpath(os.path.join(d, val))
                if os.path.isfile(p2):
                    path = p2
                else:
                    # a directory reference points at its 'map' file
                    p3 = util.normpath(os.path.join(p2, b"map"))
                    if os.path.isfile(p3):
                        path = p3

        # inherit templates from the base style, then override below
        cache, tmap, aliases = _readmapfile(path)

    for key, val in conf[b'templates'].items():
        if not val:
            raise error.ParseError(
                _(b'missing value'), conf.source(b'templates', key)
            )
        if val[0] in b"'\"":
            # quoted value: an inline template fragment
            if val[0] != val[-1]:
                raise error.ParseError(
                    _(b'unmatched quotes'), conf.source(b'templates', key)
                )
            cache[key] = unquotestring(val)
        elif key != b'__base__':
            # unquoted value: path of a template file, relative to the map
            tmap[key] = os.path.join(base, val)
    aliases.extend(conf[b'templatealias'].items())
    return cache, tmap, aliases
882 |
|
882 | |||
883 |
|
883 | |||
class loader(object):
    """Load template fragments optionally from a map file"""

    def __init__(self, cache, aliases):
        # copy so later updates to self.cache don't leak into the caller
        self.cache = {} if cache is None else cache.copy()
        self._map = {}
        self._aliasmap = _aliasrules.buildmap(aliases)

    def __contains__(self, key):
        return key in self.cache or key in self._map

    def load(self, t):
        """Get parsed tree for the given template name. Use a local cache."""
        if t not in self.cache:
            try:
                self.cache[t] = util.readfile(self._map[t])
            except KeyError as inst:
                raise templateutil.TemplateNotFound(
                    _(b'"%s" not in template map') % inst.args[0]
                )
            except IOError as inst:
                reason = _(b'template file %s: %s') % (
                    self._map[t],
                    stringutil.forcebytestr(inst.args[1]),
                )
                raise IOError(inst.args[0], encoding.strfromlocal(reason))
        return self._parse(self.cache[t])

    def _parse(self, tmpl):
        # parse, then expand user aliases if any are configured
        tree = parse(tmpl)
        if self._aliasmap:
            tree = _aliasrules.expand(self._aliasmap, tree)
        return tree

    def _findsymbolsused(self, tree, syms):
        # syms is a pair of accumulator sets: (keywords, filters/functions)
        if not tree:
            return
        op = tree[0]
        if op in {b'integer', b'string'}:
            return
        if op == b'symbol':
            s = tree[1]
            if s in syms[0]:
                return  # avoid recursion: s -> cache[s] -> s
            syms[0].add(s)
            if s in self.cache or s in self._map:
                # s may be a reference for named template
                self._findsymbolsused(self.load(s), syms)
            return
        # '{arg|func}' == '{func(arg)}'
        if op == b'|':
            syms[1].add(getsymbol(tree[2]))
            self._findsymbolsused(tree[1], syms)
            return
        if op == b'func':
            syms[1].add(getsymbol(tree[1]))
            self._findsymbolsused(tree[2], syms)
            return
        for subtree in tree[1:]:
            self._findsymbolsused(subtree, syms)

    def symbolsused(self, t):
        """Look up (keywords, filters/functions) referenced from the name
        template 't'

        This may load additional templates from the map file.
        """
        syms = (set(), set())
        self._findsymbolsused(self.load(t), syms)
        return syms
956 |
|
956 | |||
957 |
|
957 | |||
958 | class templater(object): |
|
958 | class templater(object): | |
959 | def __init__( |
|
959 | def __init__( | |
960 | self, |
|
960 | self, | |
961 | filters=None, |
|
961 | filters=None, | |
962 | defaults=None, |
|
962 | defaults=None, | |
963 | resources=None, |
|
963 | resources=None, | |
964 | cache=None, |
|
964 | cache=None, | |
965 | aliases=(), |
|
965 | aliases=(), | |
966 | minchunk=1024, |
|
966 | minchunk=1024, | |
967 | maxchunk=65536, |
|
967 | maxchunk=65536, | |
968 | ): |
|
968 | ): | |
969 | """Create template engine optionally with preloaded template fragments |
|
969 | """Create template engine optionally with preloaded template fragments | |
970 |
|
970 | |||
971 | - ``filters``: a dict of functions to transform a value into another. |
|
971 | - ``filters``: a dict of functions to transform a value into another. | |
972 | - ``defaults``: a dict of symbol values/functions; may be overridden |
|
972 | - ``defaults``: a dict of symbol values/functions; may be overridden | |
973 | by a ``mapping`` dict. |
|
973 | by a ``mapping`` dict. | |
974 | - ``resources``: a resourcemapper object to look up internal data |
|
974 | - ``resources``: a resourcemapper object to look up internal data | |
975 | (e.g. cache), inaccessible from user template. |
|
975 | (e.g. cache), inaccessible from user template. | |
976 | - ``cache``: a dict of preloaded template fragments. |
|
976 | - ``cache``: a dict of preloaded template fragments. | |
977 | - ``aliases``: a list of alias (name, replacement) pairs. |
|
977 | - ``aliases``: a list of alias (name, replacement) pairs. | |
978 |
|
978 | |||
979 | self.cache may be updated later to register additional template |
|
979 | self.cache may be updated later to register additional template | |
980 | fragments. |
|
980 | fragments. | |
981 | """ |
|
981 | """ | |
982 | allfilters = templatefilters.filters.copy() |
|
982 | allfilters = templatefilters.filters.copy() | |
983 | if filters: |
|
983 | if filters: | |
984 | allfilters.update(filters) |
|
984 | allfilters.update(filters) | |
985 | self._loader = loader(cache, aliases) |
|
985 | self._loader = loader(cache, aliases) | |
986 | self._proc = engine(self._loader.load, allfilters, defaults, resources) |
|
986 | self._proc = engine(self._loader.load, allfilters, defaults, resources) | |
987 | self._minchunk, self._maxchunk = minchunk, maxchunk |
|
987 | self._minchunk, self._maxchunk = minchunk, maxchunk | |
988 |
|
988 | |||
989 | @classmethod |
|
989 | @classmethod | |
990 | def frommapfile( |
|
990 | def frommapfile( | |
991 | cls, |
|
991 | cls, | |
992 | mapfile, |
|
992 | mapfile, | |
993 | filters=None, |
|
993 | filters=None, | |
994 | defaults=None, |
|
994 | defaults=None, | |
995 | resources=None, |
|
995 | resources=None, | |
996 | cache=None, |
|
996 | cache=None, | |
997 | minchunk=1024, |
|
997 | minchunk=1024, | |
998 | maxchunk=65536, |
|
998 | maxchunk=65536, | |
999 | ): |
|
999 | ): | |
1000 | """Create templater from the specified map file""" |
|
1000 | """Create templater from the specified map file""" | |
1001 | t = cls(filters, defaults, resources, cache, [], minchunk, maxchunk) |
|
1001 | t = cls(filters, defaults, resources, cache, [], minchunk, maxchunk) | |
1002 | cache, tmap, aliases = _readmapfile(mapfile) |
|
1002 | cache, tmap, aliases = _readmapfile(mapfile) | |
1003 | t._loader.cache.update(cache) |
|
1003 | t._loader.cache.update(cache) | |
1004 | t._loader._map = tmap |
|
1004 | t._loader._map = tmap | |
1005 | t._loader._aliasmap = _aliasrules.buildmap(aliases) |
|
1005 | t._loader._aliasmap = _aliasrules.buildmap(aliases) | |
1006 | return t |
|
1006 | return t | |
1007 |
|
1007 | |||
1008 | def __contains__(self, key): |
|
1008 | def __contains__(self, key): | |
1009 | return key in self._loader |
|
1009 | return key in self._loader | |
1010 |
|
1010 | |||
1011 | @property |
|
1011 | @property | |
1012 | def cache(self): |
|
1012 | def cache(self): | |
1013 | return self._loader.cache |
|
1013 | return self._loader.cache | |
1014 |
|
1014 | |||
1015 | # for highlight extension to insert one-time 'colorize' filter |
|
1015 | # for highlight extension to insert one-time 'colorize' filter | |
1016 | @property |
|
1016 | @property | |
1017 | def _filters(self): |
|
1017 | def _filters(self): | |
1018 | return self._proc._filters |
|
1018 | return self._proc._filters | |
1019 |
|
1019 | |||
1020 | @property |
|
1020 | @property | |
1021 | def defaults(self): |
|
1021 | def defaults(self): | |
1022 | return self._proc._defaults |
|
1022 | return self._proc._defaults | |
1023 |
|
1023 | |||
1024 | def load(self, t): |
|
1024 | def load(self, t): | |
1025 | """Get parsed tree for the given template name. Use a local cache.""" |
|
1025 | """Get parsed tree for the given template name. Use a local cache.""" | |
1026 | return self._loader.load(t) |
|
1026 | return self._loader.load(t) | |
1027 |
|
1027 | |||
1028 | def symbolsuseddefault(self): |
|
1028 | def symbolsuseddefault(self): | |
1029 | """Look up (keywords, filters/functions) referenced from the default |
|
1029 | """Look up (keywords, filters/functions) referenced from the default | |
1030 | unnamed template |
|
1030 | unnamed template | |
1031 |
|
1031 | |||
1032 | This may load additional templates from the map file. |
|
1032 | This may load additional templates from the map file. | |
1033 | """ |
|
1033 | """ | |
1034 | return self.symbolsused(b'') |
|
1034 | return self.symbolsused(b'') | |
1035 |
|
1035 | |||
1036 | def symbolsused(self, t): |
|
1036 | def symbolsused(self, t): | |
1037 | """Look up (keywords, filters/functions) referenced from the name |
|
1037 | """Look up (keywords, filters/functions) referenced from the name | |
1038 | template 't' |
|
1038 | template 't' | |
1039 |
|
1039 | |||
1040 | This may load additional templates from the map file. |
|
1040 | This may load additional templates from the map file. | |
1041 | """ |
|
1041 | """ | |
1042 | return self._loader.symbolsused(t) |
|
1042 | return self._loader.symbolsused(t) | |
1043 |
|
1043 | |||
1044 | def renderdefault(self, mapping): |
|
1044 | def renderdefault(self, mapping): | |
1045 | """Render the default unnamed template and return result as string""" |
|
1045 | """Render the default unnamed template and return result as string""" | |
1046 | return self.render(b'', mapping) |
|
1046 | return self.render(b'', mapping) | |
1047 |
|
1047 | |||
1048 | def render(self, t, mapping): |
|
1048 | def render(self, t, mapping): | |
1049 | """Render the specified named template and return result as string""" |
|
1049 | """Render the specified named template and return result as string""" | |
1050 | return b''.join(self.generate(t, mapping)) |
|
1050 | return b''.join(self.generate(t, mapping)) | |
1051 |
|
1051 | |||
1052 | def generate(self, t, mapping): |
|
1052 | def generate(self, t, mapping): | |
1053 | """Return a generator that renders the specified named template and |
|
1053 | """Return a generator that renders the specified named template and | |
1054 | yields chunks""" |
|
1054 | yields chunks""" | |
1055 | stream = self._proc.process(t, mapping) |
|
1055 | stream = self._proc.process(t, mapping) | |
1056 | if self._minchunk: |
|
1056 | if self._minchunk: | |
1057 | stream = util.increasingchunks( |
|
1057 | stream = util.increasingchunks( | |
1058 | stream, min=self._minchunk, max=self._maxchunk |
|
1058 | stream, min=self._minchunk, max=self._maxchunk | |
1059 | ) |
|
1059 | ) | |
1060 | return stream |
|
1060 | return stream | |
1061 |
|
1061 | |||
1062 |
|
1062 | |||
def templatedir():
    '''Return the directory holding bundled template files, or None if it
    does not exist on disk.'''
    path = os.path.normpath(os.path.join(resourceutil.datapath, b'templates'))
    if os.path.isdir(path):
        return path
    return None
1067 |
|
1067 | |||
1068 |
|
1068 | |||
def templatepath(name):
    '''return location of template file. returns None if not found.

    Returns None either when no template directory is available or when
    the named file does not exist inside it.
    '''
    dir = templatedir()
    if dir is None:
        return None
    # reuse the directory already resolved above instead of calling
    # templatedir() a second time (it was previously re-invoked here,
    # doing redundant filesystem work after the None check)
    f = os.path.join(dir, name)
    # os.path.join of a non-empty dir is always truthy, so only the
    # existence check is needed
    if os.path.exists(f):
        return f
    return None
1078 |
|
1078 | |||
1079 |
|
1079 | |||
def stylemap(styles, path=None):
    """Return (style, path-to-mapfile) for the first style that resolves.

    Searches mapfile in the following locations:
    1. templatepath/style/map
    2. templatepath/map-style
    3. templatepath/map
    """

    if path is None:
        path = templatedir()

    # a single style name is treated as a one-element candidate list
    if isinstance(styles, bytes):
        styles = [styles]

    if path is not None:
        for style in styles:
            # only plain name is allowed to honor template paths
            if not style or style in (pycompat.oscurdir, pycompat.ospardir):
                continue
            if pycompat.ossep in style:
                continue
            if pycompat.osaltsep and pycompat.osaltsep in style:
                continue

            candidates = [
                os.path.join(style, b'map'),
                b'map-' + style,
                b'map',
            ]
            for rel in candidates:
                mapfile = os.path.join(path, rel)
                if os.path.isfile(mapfile):
                    return style, mapfile

    raise RuntimeError(b"No hgweb templates found in %r" % path)
|
1114 | raise RuntimeError(b"No hgweb templates found in %r" % path) |
General Comments 0
You need to be logged in to leave comments.
Login now