Show More
@@ -1,579 +1,577 b'' | |||||
1 | # formatter.py - generic output formatting for mercurial |
|
1 | # formatter.py - generic output formatting for mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2012 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2012 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | """Generic output formatting for Mercurial |
|
8 | """Generic output formatting for Mercurial | |
9 |
|
9 | |||
10 | The formatter provides API to show data in various ways. The following |
|
10 | The formatter provides API to show data in various ways. The following | |
11 | functions should be used in place of ui.write(): |
|
11 | functions should be used in place of ui.write(): | |
12 |
|
12 | |||
13 | - fm.write() for unconditional output |
|
13 | - fm.write() for unconditional output | |
14 | - fm.condwrite() to show some extra data conditionally in plain output |
|
14 | - fm.condwrite() to show some extra data conditionally in plain output | |
15 | - fm.context() to provide changectx to template output |
|
15 | - fm.context() to provide changectx to template output | |
16 | - fm.data() to provide extra data to JSON or template output |
|
16 | - fm.data() to provide extra data to JSON or template output | |
17 | - fm.plain() to show raw text that isn't provided to JSON or template output |
|
17 | - fm.plain() to show raw text that isn't provided to JSON or template output | |
18 |
|
18 | |||
19 | To show structured data (e.g. date tuples, dicts, lists), apply fm.format*() |
|
19 | To show structured data (e.g. date tuples, dicts, lists), apply fm.format*() | |
20 | beforehand so the data is converted to the appropriate data type. Use |
|
20 | beforehand so the data is converted to the appropriate data type. Use | |
21 | fm.isplain() if you need to convert or format data conditionally which isn't |
|
21 | fm.isplain() if you need to convert or format data conditionally which isn't | |
22 | supported by the formatter API. |
|
22 | supported by the formatter API. | |
23 |
|
23 | |||
24 | To build nested structure (i.e. a list of dicts), use fm.nested(). |
|
24 | To build nested structure (i.e. a list of dicts), use fm.nested(). | |
25 |
|
25 | |||
26 | See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan |
|
26 | See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan | |
27 |
|
27 | |||
28 | fm.condwrite() vs 'if cond:': |
|
28 | fm.condwrite() vs 'if cond:': | |
29 |
|
29 | |||
30 | In most cases, use fm.condwrite() so users can selectively show the data |
|
30 | In most cases, use fm.condwrite() so users can selectively show the data | |
31 | in template output. If it's costly to build data, use plain 'if cond:' with |
|
31 | in template output. If it's costly to build data, use plain 'if cond:' with | |
32 | fm.write(). |
|
32 | fm.write(). | |
33 |
|
33 | |||
34 | fm.nested() vs fm.formatdict() (or fm.formatlist()): |
|
34 | fm.nested() vs fm.formatdict() (or fm.formatlist()): | |
35 |
|
35 | |||
36 | fm.nested() should be used to form a tree structure (a list of dicts of |
|
36 | fm.nested() should be used to form a tree structure (a list of dicts of | |
37 | lists of dicts...) which can be accessed through template keywords, e.g. |
|
37 | lists of dicts...) which can be accessed through template keywords, e.g. | |
38 | "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict() |
|
38 | "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict() | |
39 | exports a dict-type object to template, which can be accessed by e.g. |
|
39 | exports a dict-type object to template, which can be accessed by e.g. | |
40 | "{get(foo, key)}" function. |
|
40 | "{get(foo, key)}" function. | |
41 |
|
41 | |||
42 | Doctest helper: |
|
42 | Doctest helper: | |
43 |
|
43 | |||
44 | >>> def show(fn, verbose=False, **opts): |
|
44 | >>> def show(fn, verbose=False, **opts): | |
45 | ... import sys |
|
45 | ... import sys | |
46 | ... from . import ui as uimod |
|
46 | ... from . import ui as uimod | |
47 | ... ui = uimod.ui() |
|
47 | ... ui = uimod.ui() | |
48 | ... ui.verbose = verbose |
|
48 | ... ui.verbose = verbose | |
49 | ... ui.pushbuffer() |
|
49 | ... ui.pushbuffer() | |
50 | ... try: |
|
50 | ... try: | |
51 | ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__), |
|
51 | ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__), | |
52 | ... pycompat.byteskwargs(opts))) |
|
52 | ... pycompat.byteskwargs(opts))) | |
53 | ... finally: |
|
53 | ... finally: | |
54 | ... print(pycompat.sysstr(ui.popbuffer()), end='') |
|
54 | ... print(pycompat.sysstr(ui.popbuffer()), end='') | |
55 |
|
55 | |||
56 | Basic example: |
|
56 | Basic example: | |
57 |
|
57 | |||
58 | >>> def files(ui, fm): |
|
58 | >>> def files(ui, fm): | |
59 | ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))] |
|
59 | ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))] | |
60 | ... for f in files: |
|
60 | ... for f in files: | |
61 | ... fm.startitem() |
|
61 | ... fm.startitem() | |
62 | ... fm.write(b'path', b'%s', f[0]) |
|
62 | ... fm.write(b'path', b'%s', f[0]) | |
63 | ... fm.condwrite(ui.verbose, b'date', b' %s', |
|
63 | ... fm.condwrite(ui.verbose, b'date', b' %s', | |
64 | ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S')) |
|
64 | ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S')) | |
65 | ... fm.data(size=f[1]) |
|
65 | ... fm.data(size=f[1]) | |
66 | ... fm.plain(b'\\n') |
|
66 | ... fm.plain(b'\\n') | |
67 | ... fm.end() |
|
67 | ... fm.end() | |
68 | >>> show(files) |
|
68 | >>> show(files) | |
69 | foo |
|
69 | foo | |
70 | bar |
|
70 | bar | |
71 | >>> show(files, verbose=True) |
|
71 | >>> show(files, verbose=True) | |
72 | foo 1970-01-01 00:00:00 |
|
72 | foo 1970-01-01 00:00:00 | |
73 | bar 1970-01-01 00:00:01 |
|
73 | bar 1970-01-01 00:00:01 | |
74 | >>> show(files, template=b'json') |
|
74 | >>> show(files, template=b'json') | |
75 | [ |
|
75 | [ | |
76 | { |
|
76 | { | |
77 | "date": [0, 0], |
|
77 | "date": [0, 0], | |
78 | "path": "foo", |
|
78 | "path": "foo", | |
79 | "size": 123 |
|
79 | "size": 123 | |
80 | }, |
|
80 | }, | |
81 | { |
|
81 | { | |
82 | "date": [1, 0], |
|
82 | "date": [1, 0], | |
83 | "path": "bar", |
|
83 | "path": "bar", | |
84 | "size": 456 |
|
84 | "size": 456 | |
85 | } |
|
85 | } | |
86 | ] |
|
86 | ] | |
87 | >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n') |
|
87 | >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n') | |
88 | path: foo |
|
88 | path: foo | |
89 | date: 1970-01-01T00:00:00+00:00 |
|
89 | date: 1970-01-01T00:00:00+00:00 | |
90 | path: bar |
|
90 | path: bar | |
91 | date: 1970-01-01T00:00:01+00:00 |
|
91 | date: 1970-01-01T00:00:01+00:00 | |
92 |
|
92 | |||
93 | Nested example: |
|
93 | Nested example: | |
94 |
|
94 | |||
95 | >>> def subrepos(ui, fm): |
|
95 | >>> def subrepos(ui, fm): | |
96 | ... fm.startitem() |
|
96 | ... fm.startitem() | |
97 | ... fm.write(b'reponame', b'[%s]\\n', b'baz') |
|
97 | ... fm.write(b'reponame', b'[%s]\\n', b'baz') | |
98 | ... files(ui, fm.nested(b'files')) |
|
98 | ... files(ui, fm.nested(b'files')) | |
99 | ... fm.end() |
|
99 | ... fm.end() | |
100 | >>> show(subrepos) |
|
100 | >>> show(subrepos) | |
101 | [baz] |
|
101 | [baz] | |
102 | foo |
|
102 | foo | |
103 | bar |
|
103 | bar | |
104 | >>> show(subrepos, template=b'{reponame}: {join(files % "{path}", ", ")}\\n') |
|
104 | >>> show(subrepos, template=b'{reponame}: {join(files % "{path}", ", ")}\\n') | |
105 | baz: foo, bar |
|
105 | baz: foo, bar | |
106 | """ |
|
106 | """ | |
107 |
|
107 | |||
108 | from __future__ import absolute_import, print_function |
|
108 | from __future__ import absolute_import, print_function | |
109 |
|
109 | |||
110 | import collections |
|
110 | import collections | |
111 | import contextlib |
|
111 | import contextlib | |
112 | import itertools |
|
112 | import itertools | |
113 | import os |
|
113 | import os | |
114 |
|
114 | |||
115 | from .i18n import _ |
|
115 | from .i18n import _ | |
116 | from .node import ( |
|
116 | from .node import ( | |
117 | hex, |
|
117 | hex, | |
118 | short, |
|
118 | short, | |
119 | ) |
|
119 | ) | |
120 |
|
120 | |||
121 | from . import ( |
|
121 | from . import ( | |
122 | error, |
|
122 | error, | |
123 | pycompat, |
|
123 | pycompat, | |
124 | templatefilters, |
|
124 | templatefilters, | |
125 | templatekw, |
|
125 | templatekw, | |
126 | templater, |
|
126 | templater, | |
127 | templateutil, |
|
127 | templateutil, | |
128 | util, |
|
128 | util, | |
129 | ) |
|
129 | ) | |
130 | from .utils import dateutil |
|
130 | from .utils import dateutil | |
131 |
|
131 | |||
132 | pickle = util.pickle |
|
132 | pickle = util.pickle | |
133 |
|
133 | |||
class _nullconverter(object):
    '''convert non-primitive data types to be processed by formatter'''

    # set to True if context object should be stored as item
    storecontext = False

    @staticmethod
    def formatdate(date, fmt):
        '''convert date tuple to appropriate format'''
        # pass the raw tuple through; structured backends (json/pickle)
        # serialize it themselves
        return date
    @staticmethod
    def formatdict(data, key, value, fmt, sep):
        '''convert dict or key-value pairs to appropriate dict format'''
        # use plain dict instead of util.sortdict so that data can be
        # serialized as a builtin dict in pickle output
        return dict(data)
    @staticmethod
    def formatlist(data, name, fmt, sep):
        '''convert iterable to appropriate list format'''
        # materialize the iterable so the item holds a serializable list
        return list(data)
154 |
|
154 | |||
class baseformatter(object):
    '''common machinery for all formatters

    Collects key-value pairs into a per-item dict (self._item); subclasses
    decide how a completed item is rendered by overriding _showitem().
    '''
    def __init__(self, ui, topic, opts, converter):
        self._ui = ui
        self._topic = topic
        self._style = opts.get("style")
        self._template = opts.get("template")
        # converter maps non-primitive values (dates, dicts, lists) to the
        # representation this output format wants
        self._converter = converter
        # current item being built; None until startitem() is called
        self._item = None
        # function to convert node to string suitable for this output
        self.hexfunc = hex
    def __enter__(self):
        # support 'with' blocks; end() is called on clean exit only
        return self
    def __exit__(self, exctype, excvalue, traceback):
        if exctype is None:
            self.end()
    def _showitem(self):
        '''show a formatted item once all data is collected'''
    def startitem(self):
        '''begin an item in the format list'''
        # flush the previous item before starting a fresh one
        if self._item is not None:
            self._showitem()
        self._item = {}
    def formatdate(self, date, fmt='%a %b %d %H:%M:%S %Y %1%2'):
        '''convert date tuple to appropriate format'''
        return self._converter.formatdate(date, fmt)
    def formatdict(self, data, key='key', value='value', fmt=None, sep=' '):
        '''convert dict or key-value pairs to appropriate dict format'''
        return self._converter.formatdict(data, key, value, fmt, sep)
    def formatlist(self, data, name, fmt=None, sep=' '):
        '''convert iterable to appropriate list format'''
        # name is mandatory argument for now, but it could be optional if
        # we have default template keyword, e.g. {item}
        return self._converter.formatlist(data, name, fmt, sep)
    def context(self, **ctxs):
        '''insert context objects to be used to render template keywords'''
        ctxs = pycompat.byteskwargs(ctxs)
        # only changectx/filectx objects are accepted as template context
        assert all(k in {'ctx', 'fctx'} for k in ctxs)
        if self._converter.storecontext:
            self._item.update(ctxs)
    def data(self, **data):
        '''insert data into item that's not shown in default output'''
        data = pycompat.byteskwargs(data)
        self._item.update(data)
    def write(self, fields, deftext, *fielddata, **opts):
        '''do default text output while assigning data to item'''
        # deftext/opts are only used by the plain formatter; here the
        # values are just recorded under their field names
        fieldkeys = fields.split()
        assert len(fieldkeys) == len(fielddata)
        self._item.update(zip(fieldkeys, fielddata))
    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''do conditional write (primarily for plain formatter)'''
        # structured output always records the data regardless of cond
        fieldkeys = fields.split()
        assert len(fieldkeys) == len(fielddata)
        self._item.update(zip(fieldkeys, fielddata))
    def plain(self, text, **opts):
        '''show raw text for non-templated mode'''
    def isplain(self):
        '''check for plain formatter usage'''
        return False
    def nested(self, field):
        '''sub formatter to store nested data in the specified field'''
        self._item[field] = data = []
        return _nestedformatter(self._ui, self._converter, data)
    def end(self):
        '''end output for the formatter'''
        # flush any item still in progress
        if self._item is not None:
            self._showitem()
221 |
|
221 | |||
def nullformatter(ui, topic):
    '''formatter that prints nothing'''
    # baseformatter._showitem is a no-op, so collected items are discarded
    return baseformatter(ui, topic, opts={}, converter=_nullconverter)
225 |
|
225 | |||
class _nestedformatter(baseformatter):
    '''build sub items and store them in the parent formatter'''
    def __init__(self, ui, converter, data):
        baseformatter.__init__(self, ui, topic='', opts={}, converter=converter)
        # list object owned by the parent formatter's item (see
        # baseformatter.nested()); completed sub items are appended to it
        self._data = data
    def _showitem(self):
        self._data.append(self._item)
233 |
|
233 | |||
def _iteritems(data):
    '''iterate key-value pairs in stable order

    A dict is sorted by key; any other iterable of pairs is assumed to
    already be in the desired order and is returned as-is.
    '''
    # NOTE(review): dict.iteritems is Python 2 only; presumably handled by
    # the project's Python 3 source transformation -- confirm
    if isinstance(data, dict):
        return sorted(data.iteritems())
    return data
239 |
|
239 | |||
class _plainconverter(object):
    '''convert non-primitive data types to text'''

    # plain output never needs the changectx stored on the item
    storecontext = False

    @staticmethod
    def formatdate(date, fmt):
        '''stringify date tuple in the given format'''
        return dateutil.datestr(date, fmt)
    @staticmethod
    def formatdict(data, key, value, fmt, sep):
        '''stringify key-value pairs separated by sep'''
        prefmt = pycompat.identity
        if fmt is None:
            # default rendering; coerce values to bytes since %s would
            # otherwise fail on non-string data
            fmt = '%s=%s'
            prefmt = pycompat.bytestr
        return sep.join(fmt % (prefmt(k), prefmt(v))
                        for k, v in _iteritems(data))
    @staticmethod
    def formatlist(data, name, fmt, sep):
        '''stringify iterable separated by sep'''
        prefmt = pycompat.identity
        if fmt is None:
            # same default/coercion dance as formatdict above
            fmt = '%s'
            prefmt = pycompat.bytestr
        return sep.join(fmt % prefmt(e) for e in data)
266 |
|
266 | |||
class plainformatter(baseformatter):
    '''the default text output scheme'''
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _plainconverter)
        # full node hashes in debug mode, short ones otherwise
        if ui.debugflag:
            self.hexfunc = hex
        else:
            self.hexfunc = short
        if ui is out:
            # writing through ui keeps label/color support (**opts)
            self._write = ui.write
        else:
            # raw stream: drop the ui-specific keyword options
            self._write = lambda s, **opts: out.write(s)
    def startitem(self):
        # plain output has no per-item state to track
        pass
    def data(self, **data):
        # extra data is only meaningful for structured output; ignore it
        pass
    def write(self, fields, deftext, *fielddata, **opts):
        self._write(deftext % fielddata, **opts)
    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''do conditional write'''
        if cond:
            self._write(deftext % fielddata, **opts)
    def plain(self, text, **opts):
        self._write(text, **opts)
    def isplain(self):
        return True
    def nested(self, field):
        # nested data will be directly written to ui
        return self
    def end(self):
        pass
298 |
|
298 | |||
class debugformatter(baseformatter):
    '''format items as a Python-literal list of dicts, e.g. "topic = [...]"'''
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        # opening of the list literal; closed in end()
        self._out.write("%s = [\n" % self._topic)
    def _showitem(self):
        self._out.write(' %s,\n' % pycompat.byterepr(self._item))
    def end(self):
        # flush the pending item first, then close the list
        baseformatter.end(self)
        self._out.write("]\n")
309 |
|
309 | |||
class pickleformatter(baseformatter):
    '''dump all items as a single pickled list when output ends'''
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        # items are buffered because pickle needs the complete list
        self._data = []
    def _showitem(self):
        self._data.append(self._item)
    def end(self):
        baseformatter.end(self)
        self._out.write(pickle.dumps(self._data))
320 |
|
320 | |||
class jsonformatter(baseformatter):
    '''stream items as a JSON array of objects'''
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        self._out.write("[")
        # tracks whether a comma is needed before the next item
        self._first = True
    def _showitem(self):
        if self._first:
            self._first = False
        else:
            self._out.write(",")

        self._out.write("\n {\n")
        first = True
        # sort keys so the output is deterministic
        for k, v in sorted(self._item.items()):
            if first:
                first = False
            else:
                self._out.write(",\n")
            # paranoid=False: output is not embedded in HTML/JS
            u = templatefilters.json(v, paranoid=False)
            self._out.write('    "%s": %s' % (k, u))
        self._out.write("\n }")
    def end(self):
        baseformatter.end(self)
        self._out.write("\n]\n")
346 |
|
346 | |||
class _templateconverter(object):
    '''convert non-primitive data types to be processed by templater'''

    # template keywords may need the changectx, so keep it on the item
    storecontext = True

    @staticmethod
    def formatdate(date, fmt):
        '''return date tuple'''
        # templates apply their own date filters, so keep the raw tuple
        return date
    @staticmethod
    def formatdict(data, key, value, fmt, sep):
        '''build object that can be evaluated as either plain string or dict'''
        data = util.sortdict(_iteritems(data))
        def f():
            # lazy plain-text rendering, used when the template treats the
            # value as a string
            yield _plainconverter.formatdict(data, key, value, fmt, sep)
        return templateutil.hybriddict(data, key=key, value=value, fmt=fmt,
                                       gen=f)
    @staticmethod
    def formatlist(data, name, fmt, sep):
        '''build object that can be evaluated as either plain string or list'''
        data = list(data)
        def f():
            # lazy plain-text rendering (see formatdict above)
            yield _plainconverter.formatlist(data, name, fmt, sep)
        return templateutil.hybridlist(data, name=name, fmt=fmt, gen=f)
371 |
|
371 | |||
class templateformatter(baseformatter):
    '''render each item through a user-specified or stock template'''
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _templateconverter)
        self._out = out
        spec = lookuptemplate(ui, topic, opts.get('template', ''))
        # name of the main template within the loaded templater
        self._tref = spec.ref
        self._t = loadtemplater(ui, spec, defaults=templatekw.keywords,
                                resources=templateresources(ui),
                                cache=templatekw.defaulttempl)
        # optional companion templates rendered around the items
        self._parts = templatepartsmap(spec, self._t,
                                       ['docheader', 'docfooter', 'separator'])
        # item index, exposed to templates as {index}
        self._counter = itertools.count()
        self._renderitem('docheader', {})

    def _showitem(self):
        item = self._item.copy()
        item['index'] = index = next(self._counter)
        if index > 0:
            # emit the separator between consecutive items only
            self._renderitem('separator', {})
        self._renderitem(self._tref, item)

    def _renderitem(self, part, item):
        # optional parts (docheader/docfooter/separator) may be undefined
        if part not in self._parts:
            return
        ref = self._parts[part]

        props = {}
        # explicitly-defined fields precede templatekw
        props.update(item)
        if 'ctx' in item or 'fctx' in item:
            # but template resources must be always available
            props['revcache'] = {}
        self._out.write(self._t.render(ref, props))

    def end(self):
        baseformatter.end(self)
        self._renderitem('docfooter', {})
411 |
|
409 | |||
# a -T/--template specification resolved by lookuptemplate():
#  - ref: name of the main template within the templater
#  - tmpl: literal template string, or None
#  - mapfile: path to a template map file, or None
# (at most one of tmpl/mapfile is set)
templatespec = collections.namedtuple(r'templatespec',
                                      r'ref tmpl mapfile')
414 |
|
412 | |||
415 | def lookuptemplate(ui, topic, tmpl): |
|
413 | def lookuptemplate(ui, topic, tmpl): | |
416 | """Find the template matching the given -T/--template spec 'tmpl' |
|
414 | """Find the template matching the given -T/--template spec 'tmpl' | |
417 |
|
415 | |||
418 | 'tmpl' can be any of the following: |
|
416 | 'tmpl' can be any of the following: | |
419 |
|
417 | |||
420 | - a literal template (e.g. '{rev}') |
|
418 | - a literal template (e.g. '{rev}') | |
421 | - a map-file name or path (e.g. 'changelog') |
|
419 | - a map-file name or path (e.g. 'changelog') | |
422 | - a reference to [templates] in config file |
|
420 | - a reference to [templates] in config file | |
423 | - a path to raw template file |
|
421 | - a path to raw template file | |
424 |
|
422 | |||
425 | A map file defines a stand-alone template environment. If a map file |
|
423 | A map file defines a stand-alone template environment. If a map file | |
426 | selected, all templates defined in the file will be loaded, and the |
|
424 | selected, all templates defined in the file will be loaded, and the | |
427 | template matching the given topic will be rendered. Aliases won't be |
|
425 | template matching the given topic will be rendered. Aliases won't be | |
428 | loaded from user config, but from the map file. |
|
426 | loaded from user config, but from the map file. | |
429 |
|
427 | |||
430 | If no map file selected, all templates in [templates] section will be |
|
428 | If no map file selected, all templates in [templates] section will be | |
431 | available as well as aliases in [templatealias]. |
|
429 | available as well as aliases in [templatealias]. | |
432 | """ |
|
430 | """ | |
433 |
|
431 | |||
434 | # looks like a literal template? |
|
432 | # looks like a literal template? | |
435 | if '{' in tmpl: |
|
433 | if '{' in tmpl: | |
436 | return templatespec('', tmpl, None) |
|
434 | return templatespec('', tmpl, None) | |
437 |
|
435 | |||
438 | # perhaps a stock style? |
|
436 | # perhaps a stock style? | |
439 | if not os.path.split(tmpl)[0]: |
|
437 | if not os.path.split(tmpl)[0]: | |
440 | mapname = (templater.templatepath('map-cmdline.' + tmpl) |
|
438 | mapname = (templater.templatepath('map-cmdline.' + tmpl) | |
441 | or templater.templatepath(tmpl)) |
|
439 | or templater.templatepath(tmpl)) | |
442 | if mapname and os.path.isfile(mapname): |
|
440 | if mapname and os.path.isfile(mapname): | |
443 | return templatespec(topic, None, mapname) |
|
441 | return templatespec(topic, None, mapname) | |
444 |
|
442 | |||
445 | # perhaps it's a reference to [templates] |
|
443 | # perhaps it's a reference to [templates] | |
446 | if ui.config('templates', tmpl): |
|
444 | if ui.config('templates', tmpl): | |
447 | return templatespec(tmpl, None, None) |
|
445 | return templatespec(tmpl, None, None) | |
448 |
|
446 | |||
449 | if tmpl == 'list': |
|
447 | if tmpl == 'list': | |
450 | ui.write(_("available styles: %s\n") % templater.stylelist()) |
|
448 | ui.write(_("available styles: %s\n") % templater.stylelist()) | |
451 | raise error.Abort(_("specify a template")) |
|
449 | raise error.Abort(_("specify a template")) | |
452 |
|
450 | |||
453 | # perhaps it's a path to a map or a template |
|
451 | # perhaps it's a path to a map or a template | |
454 | if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl): |
|
452 | if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl): | |
455 | # is it a mapfile for a style? |
|
453 | # is it a mapfile for a style? | |
456 | if os.path.basename(tmpl).startswith("map-"): |
|
454 | if os.path.basename(tmpl).startswith("map-"): | |
457 | return templatespec(topic, None, os.path.realpath(tmpl)) |
|
455 | return templatespec(topic, None, os.path.realpath(tmpl)) | |
458 | with util.posixfile(tmpl, 'rb') as f: |
|
456 | with util.posixfile(tmpl, 'rb') as f: | |
459 | tmpl = f.read() |
|
457 | tmpl = f.read() | |
460 | return templatespec('', tmpl, None) |
|
458 | return templatespec('', tmpl, None) | |
461 |
|
459 | |||
462 | # constant string? |
|
460 | # constant string? | |
463 | return templatespec('', tmpl, None) |
|
461 | return templatespec('', tmpl, None) | |
464 |
|
462 | |||
465 | def templatepartsmap(spec, t, partnames): |
|
463 | def templatepartsmap(spec, t, partnames): | |
466 | """Create a mapping of {part: ref}""" |
|
464 | """Create a mapping of {part: ref}""" | |
467 | partsmap = {spec.ref: spec.ref} # initial ref must exist in t |
|
465 | partsmap = {spec.ref: spec.ref} # initial ref must exist in t | |
468 | if spec.mapfile: |
|
466 | if spec.mapfile: | |
469 | partsmap.update((p, p) for p in partnames if p in t) |
|
467 | partsmap.update((p, p) for p in partnames if p in t) | |
470 | elif spec.ref: |
|
468 | elif spec.ref: | |
471 | for part in partnames: |
|
469 | for part in partnames: | |
472 | ref = '%s:%s' % (spec.ref, part) # select config sub-section |
|
470 | ref = '%s:%s' % (spec.ref, part) # select config sub-section | |
473 | if ref in t: |
|
471 | if ref in t: | |
474 | partsmap[part] = ref |
|
472 | partsmap[part] = ref | |
475 | return partsmap |
|
473 | return partsmap | |
476 |
|
474 | |||
477 | def loadtemplater(ui, spec, defaults=None, resources=None, cache=None): |
|
475 | def loadtemplater(ui, spec, defaults=None, resources=None, cache=None): | |
478 | """Create a templater from either a literal template or loading from |
|
476 | """Create a templater from either a literal template or loading from | |
479 | a map file""" |
|
477 | a map file""" | |
480 | assert not (spec.tmpl and spec.mapfile) |
|
478 | assert not (spec.tmpl and spec.mapfile) | |
481 | if spec.mapfile: |
|
479 | if spec.mapfile: | |
482 | frommapfile = templater.templater.frommapfile |
|
480 | frommapfile = templater.templater.frommapfile | |
483 | return frommapfile(spec.mapfile, defaults=defaults, resources=resources, |
|
481 | return frommapfile(spec.mapfile, defaults=defaults, resources=resources, | |
484 | cache=cache) |
|
482 | cache=cache) | |
485 | return maketemplater(ui, spec.tmpl, defaults=defaults, resources=resources, |
|
483 | return maketemplater(ui, spec.tmpl, defaults=defaults, resources=resources, | |
486 | cache=cache) |
|
484 | cache=cache) | |
487 |
|
485 | |||
488 | def maketemplater(ui, tmpl, defaults=None, resources=None, cache=None): |
|
486 | def maketemplater(ui, tmpl, defaults=None, resources=None, cache=None): | |
489 | """Create a templater from a string template 'tmpl'""" |
|
487 | """Create a templater from a string template 'tmpl'""" | |
490 | aliases = ui.configitems('templatealias') |
|
488 | aliases = ui.configitems('templatealias') | |
491 | t = templater.templater(defaults=defaults, resources=resources, |
|
489 | t = templater.templater(defaults=defaults, resources=resources, | |
492 | cache=cache, aliases=aliases) |
|
490 | cache=cache, aliases=aliases) | |
493 | t.cache.update((k, templater.unquotestring(v)) |
|
491 | t.cache.update((k, templater.unquotestring(v)) | |
494 | for k, v in ui.configitems('templates')) |
|
492 | for k, v in ui.configitems('templates')) | |
495 | if tmpl: |
|
493 | if tmpl: | |
496 | t.cache[''] = tmpl |
|
494 | t.cache[''] = tmpl | |
497 | return t |
|
495 | return t | |
498 |
|
496 | |||
499 | def templateresources(ui, repo=None): |
|
497 | def templateresources(ui, repo=None): | |
500 | """Create a dict of template resources designed for the default templatekw |
|
498 | """Create a dict of template resources designed for the default templatekw | |
501 | and function""" |
|
499 | and function""" | |
502 | resmap = { |
|
500 | resmap = { | |
503 | 'cache': {}, # for templatekw/funcs to store reusable data |
|
501 | 'cache': {}, # for templatekw/funcs to store reusable data | |
504 | 'repo': repo, |
|
502 | 'repo': repo, | |
505 | 'ui': ui, |
|
503 | 'ui': ui, | |
506 | } |
|
504 | } | |
507 |
|
505 | |||
508 | def getsome(context, mapping, key): |
|
506 | def getsome(context, mapping, key): | |
509 | v = mapping.get(key) |
|
507 | v = mapping.get(key) | |
510 | if v is not None: |
|
508 | if v is not None: | |
511 | return v |
|
509 | return v | |
512 | return resmap.get(key) |
|
510 | return resmap.get(key) | |
513 |
|
511 | |||
514 | def getctx(context, mapping, key): |
|
512 | def getctx(context, mapping, key): | |
515 | ctx = mapping.get('ctx') |
|
513 | ctx = mapping.get('ctx') | |
516 | if ctx is not None: |
|
514 | if ctx is not None: | |
517 | return ctx |
|
515 | return ctx | |
518 | fctx = mapping.get('fctx') |
|
516 | fctx = mapping.get('fctx') | |
519 | if fctx is not None: |
|
517 | if fctx is not None: | |
520 | return fctx.changectx() |
|
518 | return fctx.changectx() | |
521 |
|
519 | |||
522 | def getrepo(context, mapping, key): |
|
520 | def getrepo(context, mapping, key): | |
523 | ctx = getctx(context, mapping, 'ctx') |
|
521 | ctx = getctx(context, mapping, 'ctx') | |
524 | if ctx is not None: |
|
522 | if ctx is not None: | |
525 | return ctx.repo() |
|
523 | return ctx.repo() | |
526 | return getsome(context, mapping, key) |
|
524 | return getsome(context, mapping, key) | |
527 |
|
525 | |||
528 | return { |
|
526 | return { | |
529 | 'cache': getsome, |
|
527 | 'cache': getsome, | |
530 | 'ctx': getctx, |
|
528 | 'ctx': getctx, | |
531 | 'fctx': getsome, |
|
529 | 'fctx': getsome, | |
532 | 'repo': getrepo, |
|
530 | 'repo': getrepo, | |
533 | 'revcache': getsome, # per-ctx cache; set later |
|
531 | 'revcache': getsome, # per-ctx cache; set later | |
534 | 'ui': getsome, |
|
532 | 'ui': getsome, | |
535 | } |
|
533 | } | |
536 |
|
534 | |||
537 | def formatter(ui, out, topic, opts): |
|
535 | def formatter(ui, out, topic, opts): | |
538 | template = opts.get("template", "") |
|
536 | template = opts.get("template", "") | |
539 | if template == "json": |
|
537 | if template == "json": | |
540 | return jsonformatter(ui, out, topic, opts) |
|
538 | return jsonformatter(ui, out, topic, opts) | |
541 | elif template == "pickle": |
|
539 | elif template == "pickle": | |
542 | return pickleformatter(ui, out, topic, opts) |
|
540 | return pickleformatter(ui, out, topic, opts) | |
543 | elif template == "debug": |
|
541 | elif template == "debug": | |
544 | return debugformatter(ui, out, topic, opts) |
|
542 | return debugformatter(ui, out, topic, opts) | |
545 | elif template != "": |
|
543 | elif template != "": | |
546 | return templateformatter(ui, out, topic, opts) |
|
544 | return templateformatter(ui, out, topic, opts) | |
547 | # developer config: ui.formatdebug |
|
545 | # developer config: ui.formatdebug | |
548 | elif ui.configbool('ui', 'formatdebug'): |
|
546 | elif ui.configbool('ui', 'formatdebug'): | |
549 | return debugformatter(ui, out, topic, opts) |
|
547 | return debugformatter(ui, out, topic, opts) | |
550 | # deprecated config: ui.formatjson |
|
548 | # deprecated config: ui.formatjson | |
551 | elif ui.configbool('ui', 'formatjson'): |
|
549 | elif ui.configbool('ui', 'formatjson'): | |
552 | return jsonformatter(ui, out, topic, opts) |
|
550 | return jsonformatter(ui, out, topic, opts) | |
553 | return plainformatter(ui, out, topic, opts) |
|
551 | return plainformatter(ui, out, topic, opts) | |
554 |
|
552 | |||
555 | @contextlib.contextmanager |
|
553 | @contextlib.contextmanager | |
556 | def openformatter(ui, filename, topic, opts): |
|
554 | def openformatter(ui, filename, topic, opts): | |
557 | """Create a formatter that writes outputs to the specified file |
|
555 | """Create a formatter that writes outputs to the specified file | |
558 |
|
556 | |||
559 | Must be invoked using the 'with' statement. |
|
557 | Must be invoked using the 'with' statement. | |
560 | """ |
|
558 | """ | |
561 | with util.posixfile(filename, 'wb') as out: |
|
559 | with util.posixfile(filename, 'wb') as out: | |
562 | with formatter(ui, out, topic, opts) as fm: |
|
560 | with formatter(ui, out, topic, opts) as fm: | |
563 | yield fm |
|
561 | yield fm | |
564 |
|
562 | |||
565 | @contextlib.contextmanager |
|
563 | @contextlib.contextmanager | |
566 | def _neverending(fm): |
|
564 | def _neverending(fm): | |
567 | yield fm |
|
565 | yield fm | |
568 |
|
566 | |||
569 | def maybereopen(fm, filename, opts): |
|
567 | def maybereopen(fm, filename, opts): | |
570 | """Create a formatter backed by file if filename specified, else return |
|
568 | """Create a formatter backed by file if filename specified, else return | |
571 | the given formatter |
|
569 | the given formatter | |
572 |
|
570 | |||
573 | Must be invoked using the 'with' statement. This will never call fm.end() |
|
571 | Must be invoked using the 'with' statement. This will never call fm.end() | |
574 | of the given formatter. |
|
572 | of the given formatter. | |
575 | """ |
|
573 | """ | |
576 | if filename: |
|
574 | if filename: | |
577 | return openformatter(fm._ui, filename, fm._topic, opts) |
|
575 | return openformatter(fm._ui, filename, fm._topic, opts) | |
578 | else: |
|
576 | else: | |
579 | return _neverending(fm) |
|
577 | return _neverending(fm) |
@@ -1,454 +1,453 b'' | |||||
1 | # hgweb/hgweb_mod.py - Web interface for a repository. |
|
1 | # hgweb/hgweb_mod.py - Web interface for a repository. | |
2 | # |
|
2 | # | |
3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> | |
4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
5 | # |
|
5 | # | |
6 | # This software may be used and distributed according to the terms of the |
|
6 | # This software may be used and distributed according to the terms of the | |
7 | # GNU General Public License version 2 or any later version. |
|
7 | # GNU General Public License version 2 or any later version. | |
8 |
|
8 | |||
9 | from __future__ import absolute_import |
|
9 | from __future__ import absolute_import | |
10 |
|
10 | |||
11 | import contextlib |
|
11 | import contextlib | |
12 | import os |
|
12 | import os | |
13 |
|
13 | |||
14 | from .common import ( |
|
14 | from .common import ( | |
15 | ErrorResponse, |
|
15 | ErrorResponse, | |
16 | HTTP_BAD_REQUEST, |
|
16 | HTTP_BAD_REQUEST, | |
17 | cspvalues, |
|
17 | cspvalues, | |
18 | permhooks, |
|
18 | permhooks, | |
19 | statusmessage, |
|
19 | statusmessage, | |
20 | ) |
|
20 | ) | |
21 |
|
21 | |||
22 | from .. import ( |
|
22 | from .. import ( | |
23 | encoding, |
|
23 | encoding, | |
24 | error, |
|
24 | error, | |
25 | formatter, |
|
25 | formatter, | |
26 | hg, |
|
26 | hg, | |
27 | hook, |
|
27 | hook, | |
28 | profiling, |
|
28 | profiling, | |
29 | pycompat, |
|
29 | pycompat, | |
30 | repoview, |
|
30 | repoview, | |
31 | templatefilters, |
|
31 | templatefilters, | |
32 | templater, |
|
32 | templater, | |
33 | templateutil, |
|
|||
34 | ui as uimod, |
|
33 | ui as uimod, | |
35 | util, |
|
34 | util, | |
36 | wireprotoserver, |
|
35 | wireprotoserver, | |
37 | ) |
|
36 | ) | |
38 |
|
37 | |||
39 | from . import ( |
|
38 | from . import ( | |
40 | request as requestmod, |
|
39 | request as requestmod, | |
41 | webcommands, |
|
40 | webcommands, | |
42 | webutil, |
|
41 | webutil, | |
43 | wsgicgi, |
|
42 | wsgicgi, | |
44 | ) |
|
43 | ) | |
45 |
|
44 | |||
46 | archivespecs = util.sortdict(( |
|
45 | archivespecs = util.sortdict(( | |
47 | ('zip', ('application/zip', 'zip', '.zip', None)), |
|
46 | ('zip', ('application/zip', 'zip', '.zip', None)), | |
48 | ('gz', ('application/x-gzip', 'tgz', '.tar.gz', None)), |
|
47 | ('gz', ('application/x-gzip', 'tgz', '.tar.gz', None)), | |
49 | ('bz2', ('application/x-bzip2', 'tbz2', '.tar.bz2', None)), |
|
48 | ('bz2', ('application/x-bzip2', 'tbz2', '.tar.bz2', None)), | |
50 | )) |
|
49 | )) | |
51 |
|
50 | |||
52 | def getstyle(req, configfn, templatepath): |
|
51 | def getstyle(req, configfn, templatepath): | |
53 | styles = ( |
|
52 | styles = ( | |
54 | req.qsparams.get('style', None), |
|
53 | req.qsparams.get('style', None), | |
55 | configfn('web', 'style'), |
|
54 | configfn('web', 'style'), | |
56 | 'paper', |
|
55 | 'paper', | |
57 | ) |
|
56 | ) | |
58 | return styles, templater.stylemap(styles, templatepath) |
|
57 | return styles, templater.stylemap(styles, templatepath) | |
59 |
|
58 | |||
60 | def makebreadcrumb(url, prefix=''): |
|
59 | def makebreadcrumb(url, prefix=''): | |
61 | '''Return a 'URL breadcrumb' list |
|
60 | '''Return a 'URL breadcrumb' list | |
62 |
|
61 | |||
63 | A 'URL breadcrumb' is a list of URL-name pairs, |
|
62 | A 'URL breadcrumb' is a list of URL-name pairs, | |
64 | corresponding to each of the path items on a URL. |
|
63 | corresponding to each of the path items on a URL. | |
65 | This can be used to create path navigation entries. |
|
64 | This can be used to create path navigation entries. | |
66 | ''' |
|
65 | ''' | |
67 | if url.endswith('/'): |
|
66 | if url.endswith('/'): | |
68 | url = url[:-1] |
|
67 | url = url[:-1] | |
69 | if prefix: |
|
68 | if prefix: | |
70 | url = '/' + prefix + url |
|
69 | url = '/' + prefix + url | |
71 | relpath = url |
|
70 | relpath = url | |
72 | if relpath.startswith('/'): |
|
71 | if relpath.startswith('/'): | |
73 | relpath = relpath[1:] |
|
72 | relpath = relpath[1:] | |
74 |
|
73 | |||
75 | breadcrumb = [] |
|
74 | breadcrumb = [] | |
76 | urlel = url |
|
75 | urlel = url | |
77 | pathitems = [''] + relpath.split('/') |
|
76 | pathitems = [''] + relpath.split('/') | |
78 | for pathel in reversed(pathitems): |
|
77 | for pathel in reversed(pathitems): | |
79 | if not pathel or not urlel: |
|
78 | if not pathel or not urlel: | |
80 | break |
|
79 | break | |
81 | breadcrumb.append({'url': urlel, 'name': pathel}) |
|
80 | breadcrumb.append({'url': urlel, 'name': pathel}) | |
82 | urlel = os.path.dirname(urlel) |
|
81 | urlel = os.path.dirname(urlel) | |
83 | return reversed(breadcrumb) |
|
82 | return reversed(breadcrumb) | |
84 |
|
83 | |||
85 | class requestcontext(object): |
|
84 | class requestcontext(object): | |
86 | """Holds state/context for an individual request. |
|
85 | """Holds state/context for an individual request. | |
87 |
|
86 | |||
88 | Servers can be multi-threaded. Holding state on the WSGI application |
|
87 | Servers can be multi-threaded. Holding state on the WSGI application | |
89 | is prone to race conditions. Instances of this class exist to hold |
|
88 | is prone to race conditions. Instances of this class exist to hold | |
90 | mutable and race-free state for requests. |
|
89 | mutable and race-free state for requests. | |
91 | """ |
|
90 | """ | |
92 | def __init__(self, app, repo, req, res): |
|
91 | def __init__(self, app, repo, req, res): | |
93 | self.repo = repo |
|
92 | self.repo = repo | |
94 | self.reponame = app.reponame |
|
93 | self.reponame = app.reponame | |
95 | self.req = req |
|
94 | self.req = req | |
96 | self.res = res |
|
95 | self.res = res | |
97 |
|
96 | |||
98 | self.archivespecs = archivespecs |
|
97 | self.archivespecs = archivespecs | |
99 |
|
98 | |||
100 | self.maxchanges = self.configint('web', 'maxchanges') |
|
99 | self.maxchanges = self.configint('web', 'maxchanges') | |
101 | self.stripecount = self.configint('web', 'stripes') |
|
100 | self.stripecount = self.configint('web', 'stripes') | |
102 | self.maxshortchanges = self.configint('web', 'maxshortchanges') |
|
101 | self.maxshortchanges = self.configint('web', 'maxshortchanges') | |
103 | self.maxfiles = self.configint('web', 'maxfiles') |
|
102 | self.maxfiles = self.configint('web', 'maxfiles') | |
104 | self.allowpull = self.configbool('web', 'allow-pull') |
|
103 | self.allowpull = self.configbool('web', 'allow-pull') | |
105 |
|
104 | |||
106 | # we use untrusted=False to prevent a repo owner from using |
|
105 | # we use untrusted=False to prevent a repo owner from using | |
107 | # web.templates in .hg/hgrc to get access to any file readable |
|
106 | # web.templates in .hg/hgrc to get access to any file readable | |
108 | # by the user running the CGI script |
|
107 | # by the user running the CGI script | |
109 | self.templatepath = self.config('web', 'templates', untrusted=False) |
|
108 | self.templatepath = self.config('web', 'templates', untrusted=False) | |
110 |
|
109 | |||
111 | # This object is more expensive to build than simple config values. |
|
110 | # This object is more expensive to build than simple config values. | |
112 | # It is shared across requests. The app will replace the object |
|
111 | # It is shared across requests. The app will replace the object | |
113 | # if it is updated. Since this is a reference and nothing should |
|
112 | # if it is updated. Since this is a reference and nothing should | |
114 | # modify the underlying object, it should be constant for the lifetime |
|
113 | # modify the underlying object, it should be constant for the lifetime | |
115 | # of the request. |
|
114 | # of the request. | |
116 | self.websubtable = app.websubtable |
|
115 | self.websubtable = app.websubtable | |
117 |
|
116 | |||
118 | self.csp, self.nonce = cspvalues(self.repo.ui) |
|
117 | self.csp, self.nonce = cspvalues(self.repo.ui) | |
119 |
|
118 | |||
120 | # Trust the settings from the .hg/hgrc files by default. |
|
119 | # Trust the settings from the .hg/hgrc files by default. | |
121 | def config(self, section, name, default=uimod._unset, untrusted=True): |
|
120 | def config(self, section, name, default=uimod._unset, untrusted=True): | |
122 | return self.repo.ui.config(section, name, default, |
|
121 | return self.repo.ui.config(section, name, default, | |
123 | untrusted=untrusted) |
|
122 | untrusted=untrusted) | |
124 |
|
123 | |||
125 | def configbool(self, section, name, default=uimod._unset, untrusted=True): |
|
124 | def configbool(self, section, name, default=uimod._unset, untrusted=True): | |
126 | return self.repo.ui.configbool(section, name, default, |
|
125 | return self.repo.ui.configbool(section, name, default, | |
127 | untrusted=untrusted) |
|
126 | untrusted=untrusted) | |
128 |
|
127 | |||
129 | def configint(self, section, name, default=uimod._unset, untrusted=True): |
|
128 | def configint(self, section, name, default=uimod._unset, untrusted=True): | |
130 | return self.repo.ui.configint(section, name, default, |
|
129 | return self.repo.ui.configint(section, name, default, | |
131 | untrusted=untrusted) |
|
130 | untrusted=untrusted) | |
132 |
|
131 | |||
133 | def configlist(self, section, name, default=uimod._unset, untrusted=True): |
|
132 | def configlist(self, section, name, default=uimod._unset, untrusted=True): | |
134 | return self.repo.ui.configlist(section, name, default, |
|
133 | return self.repo.ui.configlist(section, name, default, | |
135 | untrusted=untrusted) |
|
134 | untrusted=untrusted) | |
136 |
|
135 | |||
137 | def archivelist(self, nodeid): |
|
136 | def archivelist(self, nodeid): | |
138 | allowed = self.configlist('web', 'allow_archive') |
|
137 | allowed = self.configlist('web', 'allow_archive') | |
139 | for typ, spec in self.archivespecs.iteritems(): |
|
138 | for typ, spec in self.archivespecs.iteritems(): | |
140 | if typ in allowed or self.configbool('web', 'allow%s' % typ): |
|
139 | if typ in allowed or self.configbool('web', 'allow%s' % typ): | |
141 | yield {'type': typ, 'extension': spec[2], 'node': nodeid} |
|
140 | yield {'type': typ, 'extension': spec[2], 'node': nodeid} | |
142 |
|
141 | |||
143 | def templater(self, req): |
|
142 | def templater(self, req): | |
144 | # determine scheme, port and server name |
|
143 | # determine scheme, port and server name | |
145 | # this is needed to create absolute urls |
|
144 | # this is needed to create absolute urls | |
146 | logourl = self.config('web', 'logourl') |
|
145 | logourl = self.config('web', 'logourl') | |
147 | logoimg = self.config('web', 'logoimg') |
|
146 | logoimg = self.config('web', 'logoimg') | |
148 | staticurl = (self.config('web', 'staticurl') |
|
147 | staticurl = (self.config('web', 'staticurl') | |
149 | or req.apppath + '/static/') |
|
148 | or req.apppath + '/static/') | |
150 | if not staticurl.endswith('/'): |
|
149 | if not staticurl.endswith('/'): | |
151 | staticurl += '/' |
|
150 | staticurl += '/' | |
152 |
|
151 | |||
153 | # some functions for the templater |
|
152 | # some functions for the templater | |
154 |
|
153 | |||
155 | def motd(**map): |
|
154 | def motd(**map): | |
156 | yield self.config('web', 'motd') |
|
155 | yield self.config('web', 'motd') | |
157 |
|
156 | |||
158 | # figure out which style to use |
|
157 | # figure out which style to use | |
159 |
|
158 | |||
160 | vars = {} |
|
159 | vars = {} | |
161 | styles, (style, mapfile) = getstyle(req, self.config, |
|
160 | styles, (style, mapfile) = getstyle(req, self.config, | |
162 | self.templatepath) |
|
161 | self.templatepath) | |
163 | if style == styles[0]: |
|
162 | if style == styles[0]: | |
164 | vars['style'] = style |
|
163 | vars['style'] = style | |
165 |
|
164 | |||
166 | sessionvars = webutil.sessionvars(vars, '?') |
|
165 | sessionvars = webutil.sessionvars(vars, '?') | |
167 |
|
166 | |||
168 | if not self.reponame: |
|
167 | if not self.reponame: | |
169 | self.reponame = (self.config('web', 'name', '') |
|
168 | self.reponame = (self.config('web', 'name', '') | |
170 | or req.reponame |
|
169 | or req.reponame | |
171 | or req.apppath |
|
170 | or req.apppath | |
172 | or self.repo.root) |
|
171 | or self.repo.root) | |
173 |
|
172 | |||
174 | def websubfilter(text): |
|
173 | def websubfilter(text): | |
175 | return templatefilters.websub(text, self.websubtable) |
|
174 | return templatefilters.websub(text, self.websubtable) | |
176 |
|
175 | |||
177 | # create the templater |
|
176 | # create the templater | |
178 | # TODO: export all keywords: defaults = templatekw.keywords.copy() |
|
177 | # TODO: export all keywords: defaults = templatekw.keywords.copy() | |
179 | defaults = { |
|
178 | defaults = { | |
180 | 'url': req.apppath + '/', |
|
179 | 'url': req.apppath + '/', | |
181 | 'logourl': logourl, |
|
180 | 'logourl': logourl, | |
182 | 'logoimg': logoimg, |
|
181 | 'logoimg': logoimg, | |
183 | 'staticurl': staticurl, |
|
182 | 'staticurl': staticurl, | |
184 | 'urlbase': req.advertisedbaseurl, |
|
183 | 'urlbase': req.advertisedbaseurl, | |
185 | 'repo': self.reponame, |
|
184 | 'repo': self.reponame, | |
186 | 'encoding': encoding.encoding, |
|
185 | 'encoding': encoding.encoding, | |
187 | 'motd': motd, |
|
186 | 'motd': motd, | |
188 | 'sessionvars': sessionvars, |
|
187 | 'sessionvars': sessionvars, | |
189 | 'pathdef': makebreadcrumb(req.apppath), |
|
188 | 'pathdef': makebreadcrumb(req.apppath), | |
190 | 'style': style, |
|
189 | 'style': style, | |
191 | 'nonce': self.nonce, |
|
190 | 'nonce': self.nonce, | |
192 | } |
|
191 | } | |
193 | tres = formatter.templateresources(self.repo.ui, self.repo) |
|
192 | tres = formatter.templateresources(self.repo.ui, self.repo) | |
194 | tmpl = templater.templater.frommapfile(mapfile, |
|
193 | tmpl = templater.templater.frommapfile(mapfile, | |
195 | filters={'websub': websubfilter}, |
|
194 | filters={'websub': websubfilter}, | |
196 | defaults=defaults, |
|
195 | defaults=defaults, | |
197 | resources=tres) |
|
196 | resources=tres) | |
198 | return tmpl |
|
197 | return tmpl | |
199 |
|
198 | |||
200 | def sendtemplate(self, name, **kwargs): |
|
199 | def sendtemplate(self, name, **kwargs): | |
201 | """Helper function to send a response generated from a template.""" |
|
200 | """Helper function to send a response generated from a template.""" | |
202 | self.res.setbodygen(self.tmpl(name, **kwargs)) |
|
201 | self.res.setbodygen(self.tmpl(name, **kwargs)) | |
203 | return self.res.sendresponse() |
|
202 | return self.res.sendresponse() | |
204 |
|
203 | |||
205 | class hgweb(object): |
|
204 | class hgweb(object): | |
206 | """HTTP server for individual repositories. |
|
205 | """HTTP server for individual repositories. | |
207 |
|
206 | |||
208 | Instances of this class serve HTTP responses for a particular |
|
207 | Instances of this class serve HTTP responses for a particular | |
209 | repository. |
|
208 | repository. | |
210 |
|
209 | |||
211 | Instances are typically used as WSGI applications. |
|
210 | Instances are typically used as WSGI applications. | |
212 |
|
211 | |||
213 | Some servers are multi-threaded. On these servers, there may |
|
212 | Some servers are multi-threaded. On these servers, there may | |
214 | be multiple active threads inside __call__. |
|
213 | be multiple active threads inside __call__. | |
215 | """ |
|
214 | """ | |
216 | def __init__(self, repo, name=None, baseui=None): |
|
215 | def __init__(self, repo, name=None, baseui=None): | |
217 | if isinstance(repo, str): |
|
216 | if isinstance(repo, str): | |
218 | if baseui: |
|
217 | if baseui: | |
219 | u = baseui.copy() |
|
218 | u = baseui.copy() | |
220 | else: |
|
219 | else: | |
221 | u = uimod.ui.load() |
|
220 | u = uimod.ui.load() | |
222 | r = hg.repository(u, repo) |
|
221 | r = hg.repository(u, repo) | |
223 | else: |
|
222 | else: | |
224 | # we trust caller to give us a private copy |
|
223 | # we trust caller to give us a private copy | |
225 | r = repo |
|
224 | r = repo | |
226 |
|
225 | |||
227 | r.ui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') |
|
226 | r.ui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') | |
228 | r.baseui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') |
|
227 | r.baseui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') | |
229 | r.ui.setconfig('ui', 'nontty', 'true', 'hgweb') |
|
228 | r.ui.setconfig('ui', 'nontty', 'true', 'hgweb') | |
230 | r.baseui.setconfig('ui', 'nontty', 'true', 'hgweb') |
|
229 | r.baseui.setconfig('ui', 'nontty', 'true', 'hgweb') | |
231 | # resolve file patterns relative to repo root |
|
230 | # resolve file patterns relative to repo root | |
232 | r.ui.setconfig('ui', 'forcecwd', r.root, 'hgweb') |
|
231 | r.ui.setconfig('ui', 'forcecwd', r.root, 'hgweb') | |
233 | r.baseui.setconfig('ui', 'forcecwd', r.root, 'hgweb') |
|
232 | r.baseui.setconfig('ui', 'forcecwd', r.root, 'hgweb') | |
234 | # displaying bundling progress bar while serving feel wrong and may |
|
233 | # displaying bundling progress bar while serving feel wrong and may | |
235 | # break some wsgi implementation. |
|
234 | # break some wsgi implementation. | |
236 | r.ui.setconfig('progress', 'disable', 'true', 'hgweb') |
|
235 | r.ui.setconfig('progress', 'disable', 'true', 'hgweb') | |
237 | r.baseui.setconfig('progress', 'disable', 'true', 'hgweb') |
|
236 | r.baseui.setconfig('progress', 'disable', 'true', 'hgweb') | |
238 | self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))] |
|
237 | self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))] | |
239 | self._lastrepo = self._repos[0] |
|
238 | self._lastrepo = self._repos[0] | |
240 | hook.redirect(True) |
|
239 | hook.redirect(True) | |
241 | self.reponame = name |
|
240 | self.reponame = name | |
242 |
|
241 | |||
243 | def _webifyrepo(self, repo): |
|
242 | def _webifyrepo(self, repo): | |
244 | repo = getwebview(repo) |
|
243 | repo = getwebview(repo) | |
245 | self.websubtable = webutil.getwebsubs(repo) |
|
244 | self.websubtable = webutil.getwebsubs(repo) | |
246 | return repo |
|
245 | return repo | |
247 |
|
246 | |||
248 | @contextlib.contextmanager |
|
247 | @contextlib.contextmanager | |
249 | def _obtainrepo(self): |
|
248 | def _obtainrepo(self): | |
250 | """Obtain a repo unique to the caller. |
|
249 | """Obtain a repo unique to the caller. | |
251 |
|
250 | |||
252 | Internally we maintain a stack of cachedlocalrepo instances |
|
251 | Internally we maintain a stack of cachedlocalrepo instances | |
253 | to be handed out. If one is available, we pop it and return it, |
|
252 | to be handed out. If one is available, we pop it and return it, | |
254 | ensuring it is up to date in the process. If one is not available, |
|
253 | ensuring it is up to date in the process. If one is not available, | |
255 | we clone the most recently used repo instance and return it. |
|
254 | we clone the most recently used repo instance and return it. | |
256 |
|
255 | |||
257 | It is currently possible for the stack to grow without bounds |
|
256 | It is currently possible for the stack to grow without bounds | |
258 | if the server allows infinite threads. However, servers should |
|
257 | if the server allows infinite threads. However, servers should | |
259 | have a thread limit, thus establishing our limit. |
|
258 | have a thread limit, thus establishing our limit. | |
260 | """ |
|
259 | """ | |
261 | if self._repos: |
|
260 | if self._repos: | |
262 | cached = self._repos.pop() |
|
261 | cached = self._repos.pop() | |
263 | r, created = cached.fetch() |
|
262 | r, created = cached.fetch() | |
264 | else: |
|
263 | else: | |
265 | cached = self._lastrepo.copy() |
|
264 | cached = self._lastrepo.copy() | |
266 | r, created = cached.fetch() |
|
265 | r, created = cached.fetch() | |
267 | if created: |
|
266 | if created: | |
268 | r = self._webifyrepo(r) |
|
267 | r = self._webifyrepo(r) | |
269 |
|
268 | |||
270 | self._lastrepo = cached |
|
269 | self._lastrepo = cached | |
271 | self.mtime = cached.mtime |
|
270 | self.mtime = cached.mtime | |
272 | try: |
|
271 | try: | |
273 | yield r |
|
272 | yield r | |
274 | finally: |
|
273 | finally: | |
275 | self._repos.append(cached) |
|
274 | self._repos.append(cached) | |
276 |
|
275 | |||
277 | def run(self): |
|
276 | def run(self): | |
278 | """Start a server from CGI environment. |
|
277 | """Start a server from CGI environment. | |
279 |
|
278 | |||
280 | Modern servers should be using WSGI and should avoid this |
|
279 | Modern servers should be using WSGI and should avoid this | |
281 | method, if possible. |
|
280 | method, if possible. | |
282 | """ |
|
281 | """ | |
283 | if not encoding.environ.get('GATEWAY_INTERFACE', |
|
282 | if not encoding.environ.get('GATEWAY_INTERFACE', | |
284 | '').startswith("CGI/1."): |
|
283 | '').startswith("CGI/1."): | |
285 | raise RuntimeError("This function is only intended to be " |
|
284 | raise RuntimeError("This function is only intended to be " | |
286 | "called while running as a CGI script.") |
|
285 | "called while running as a CGI script.") | |
287 | wsgicgi.launch(self) |
|
286 | wsgicgi.launch(self) | |
288 |
|
287 | |||
289 | def __call__(self, env, respond): |
|
288 | def __call__(self, env, respond): | |
290 | """Run the WSGI application. |
|
289 | """Run the WSGI application. | |
291 |
|
290 | |||
292 | This may be called by multiple threads. |
|
291 | This may be called by multiple threads. | |
293 | """ |
|
292 | """ | |
294 | req = requestmod.parserequestfromenv(env) |
|
293 | req = requestmod.parserequestfromenv(env) | |
295 | res = requestmod.wsgiresponse(req, respond) |
|
294 | res = requestmod.wsgiresponse(req, respond) | |
296 |
|
295 | |||
297 | return self.run_wsgi(req, res) |
|
296 | return self.run_wsgi(req, res) | |
298 |
|
297 | |||
299 | def run_wsgi(self, req, res): |
|
298 | def run_wsgi(self, req, res): | |
300 | """Internal method to run the WSGI application. |
|
299 | """Internal method to run the WSGI application. | |
301 |
|
300 | |||
302 | This is typically only called by Mercurial. External consumers |
|
301 | This is typically only called by Mercurial. External consumers | |
303 | should be using instances of this class as the WSGI application. |
|
302 | should be using instances of this class as the WSGI application. | |
304 | """ |
|
303 | """ | |
305 | with self._obtainrepo() as repo: |
|
304 | with self._obtainrepo() as repo: | |
306 | profile = repo.ui.configbool('profiling', 'enabled') |
|
305 | profile = repo.ui.configbool('profiling', 'enabled') | |
307 | with profiling.profile(repo.ui, enabled=profile): |
|
306 | with profiling.profile(repo.ui, enabled=profile): | |
308 | for r in self._runwsgi(req, res, repo): |
|
307 | for r in self._runwsgi(req, res, repo): | |
309 | yield r |
|
308 | yield r | |
310 |
|
309 | |||
311 | def _runwsgi(self, req, res, repo): |
|
310 | def _runwsgi(self, req, res, repo): | |
312 | rctx = requestcontext(self, repo, req, res) |
|
311 | rctx = requestcontext(self, repo, req, res) | |
313 |
|
312 | |||
314 | # This state is global across all threads. |
|
313 | # This state is global across all threads. | |
315 | encoding.encoding = rctx.config('web', 'encoding') |
|
314 | encoding.encoding = rctx.config('web', 'encoding') | |
316 | rctx.repo.ui.environ = req.rawenv |
|
315 | rctx.repo.ui.environ = req.rawenv | |
317 |
|
316 | |||
318 | if rctx.csp: |
|
317 | if rctx.csp: | |
319 | # hgwebdir may have added CSP header. Since we generate our own, |
|
318 | # hgwebdir may have added CSP header. Since we generate our own, | |
320 | # replace it. |
|
319 | # replace it. | |
321 | res.headers['Content-Security-Policy'] = rctx.csp |
|
320 | res.headers['Content-Security-Policy'] = rctx.csp | |
322 |
|
321 | |||
323 | handled = wireprotoserver.handlewsgirequest( |
|
322 | handled = wireprotoserver.handlewsgirequest( | |
324 | rctx, req, res, self.check_perm) |
|
323 | rctx, req, res, self.check_perm) | |
325 | if handled: |
|
324 | if handled: | |
326 | return res.sendresponse() |
|
325 | return res.sendresponse() | |
327 |
|
326 | |||
328 | # Old implementations of hgweb supported dispatching the request via |
|
327 | # Old implementations of hgweb supported dispatching the request via | |
329 | # the initial query string parameter instead of using PATH_INFO. |
|
328 | # the initial query string parameter instead of using PATH_INFO. | |
330 | # If PATH_INFO is present (signaled by ``req.dispatchpath`` having |
|
329 | # If PATH_INFO is present (signaled by ``req.dispatchpath`` having | |
331 | # a value), we use it. Otherwise fall back to the query string. |
|
330 | # a value), we use it. Otherwise fall back to the query string. | |
332 | if req.dispatchpath is not None: |
|
331 | if req.dispatchpath is not None: | |
333 | query = req.dispatchpath |
|
332 | query = req.dispatchpath | |
334 | else: |
|
333 | else: | |
335 | query = req.querystring.partition('&')[0].partition(';')[0] |
|
334 | query = req.querystring.partition('&')[0].partition(';')[0] | |
336 |
|
335 | |||
337 | # translate user-visible url structure to internal structure |
|
336 | # translate user-visible url structure to internal structure | |
338 |
|
337 | |||
339 | args = query.split('/', 2) |
|
338 | args = query.split('/', 2) | |
340 | if 'cmd' not in req.qsparams and args and args[0]: |
|
339 | if 'cmd' not in req.qsparams and args and args[0]: | |
341 | cmd = args.pop(0) |
|
340 | cmd = args.pop(0) | |
342 | style = cmd.rfind('-') |
|
341 | style = cmd.rfind('-') | |
343 | if style != -1: |
|
342 | if style != -1: | |
344 | req.qsparams['style'] = cmd[:style] |
|
343 | req.qsparams['style'] = cmd[:style] | |
345 | cmd = cmd[style + 1:] |
|
344 | cmd = cmd[style + 1:] | |
346 |
|
345 | |||
347 | # avoid accepting e.g. style parameter as command |
|
346 | # avoid accepting e.g. style parameter as command | |
348 | if util.safehasattr(webcommands, cmd): |
|
347 | if util.safehasattr(webcommands, cmd): | |
349 | req.qsparams['cmd'] = cmd |
|
348 | req.qsparams['cmd'] = cmd | |
350 |
|
349 | |||
351 | if cmd == 'static': |
|
350 | if cmd == 'static': | |
352 | req.qsparams['file'] = '/'.join(args) |
|
351 | req.qsparams['file'] = '/'.join(args) | |
353 | else: |
|
352 | else: | |
354 | if args and args[0]: |
|
353 | if args and args[0]: | |
355 | node = args.pop(0).replace('%2F', '/') |
|
354 | node = args.pop(0).replace('%2F', '/') | |
356 | req.qsparams['node'] = node |
|
355 | req.qsparams['node'] = node | |
357 | if args: |
|
356 | if args: | |
358 | if 'file' in req.qsparams: |
|
357 | if 'file' in req.qsparams: | |
359 | del req.qsparams['file'] |
|
358 | del req.qsparams['file'] | |
360 | for a in args: |
|
359 | for a in args: | |
361 | req.qsparams.add('file', a) |
|
360 | req.qsparams.add('file', a) | |
362 |
|
361 | |||
363 | ua = req.headers.get('User-Agent', '') |
|
362 | ua = req.headers.get('User-Agent', '') | |
364 | if cmd == 'rev' and 'mercurial' in ua: |
|
363 | if cmd == 'rev' and 'mercurial' in ua: | |
365 | req.qsparams['style'] = 'raw' |
|
364 | req.qsparams['style'] = 'raw' | |
366 |
|
365 | |||
367 | if cmd == 'archive': |
|
366 | if cmd == 'archive': | |
368 | fn = req.qsparams['node'] |
|
367 | fn = req.qsparams['node'] | |
369 | for type_, spec in rctx.archivespecs.iteritems(): |
|
368 | for type_, spec in rctx.archivespecs.iteritems(): | |
370 | ext = spec[2] |
|
369 | ext = spec[2] | |
371 | if fn.endswith(ext): |
|
370 | if fn.endswith(ext): | |
372 | req.qsparams['node'] = fn[:-len(ext)] |
|
371 | req.qsparams['node'] = fn[:-len(ext)] | |
373 | req.qsparams['type'] = type_ |
|
372 | req.qsparams['type'] = type_ | |
374 | else: |
|
373 | else: | |
375 | cmd = req.qsparams.get('cmd', '') |
|
374 | cmd = req.qsparams.get('cmd', '') | |
376 |
|
375 | |||
377 | # process the web interface request |
|
376 | # process the web interface request | |
378 |
|
377 | |||
379 | try: |
|
378 | try: | |
380 | rctx.tmpl = rctx.templater(req) |
|
379 | rctx.tmpl = rctx.templater(req) | |
381 |
ctype = rctx.tmpl('mimetype', |
|
380 | ctype = rctx.tmpl.render('mimetype', | |
382 | ctype = templateutil.stringify(ctype) |
|
381 | {'encoding': encoding.encoding}) | |
383 |
|
382 | |||
384 | # check read permissions non-static content |
|
383 | # check read permissions non-static content | |
385 | if cmd != 'static': |
|
384 | if cmd != 'static': | |
386 | self.check_perm(rctx, req, None) |
|
385 | self.check_perm(rctx, req, None) | |
387 |
|
386 | |||
388 | if cmd == '': |
|
387 | if cmd == '': | |
389 | req.qsparams['cmd'] = rctx.tmpl.cache['default'] |
|
388 | req.qsparams['cmd'] = rctx.tmpl.cache['default'] | |
390 | cmd = req.qsparams['cmd'] |
|
389 | cmd = req.qsparams['cmd'] | |
391 |
|
390 | |||
392 | # Don't enable caching if using a CSP nonce because then it wouldn't |
|
391 | # Don't enable caching if using a CSP nonce because then it wouldn't | |
393 | # be a nonce. |
|
392 | # be a nonce. | |
394 | if rctx.configbool('web', 'cache') and not rctx.nonce: |
|
393 | if rctx.configbool('web', 'cache') and not rctx.nonce: | |
395 | tag = 'W/"%d"' % self.mtime |
|
394 | tag = 'W/"%d"' % self.mtime | |
396 | if req.headers.get('If-None-Match') == tag: |
|
395 | if req.headers.get('If-None-Match') == tag: | |
397 | res.status = '304 Not Modified' |
|
396 | res.status = '304 Not Modified' | |
398 | # Response body not allowed on 304. |
|
397 | # Response body not allowed on 304. | |
399 | res.setbodybytes('') |
|
398 | res.setbodybytes('') | |
400 | return res.sendresponse() |
|
399 | return res.sendresponse() | |
401 |
|
400 | |||
402 | res.headers['ETag'] = tag |
|
401 | res.headers['ETag'] = tag | |
403 |
|
402 | |||
404 | if cmd not in webcommands.__all__: |
|
403 | if cmd not in webcommands.__all__: | |
405 | msg = 'no such method: %s' % cmd |
|
404 | msg = 'no such method: %s' % cmd | |
406 | raise ErrorResponse(HTTP_BAD_REQUEST, msg) |
|
405 | raise ErrorResponse(HTTP_BAD_REQUEST, msg) | |
407 | else: |
|
406 | else: | |
408 | # Set some globals appropriate for web handlers. Commands can |
|
407 | # Set some globals appropriate for web handlers. Commands can | |
409 | # override easily enough. |
|
408 | # override easily enough. | |
410 | res.status = '200 Script output follows' |
|
409 | res.status = '200 Script output follows' | |
411 | res.headers['Content-Type'] = ctype |
|
410 | res.headers['Content-Type'] = ctype | |
412 | return getattr(webcommands, cmd)(rctx) |
|
411 | return getattr(webcommands, cmd)(rctx) | |
413 |
|
412 | |||
414 | except (error.LookupError, error.RepoLookupError) as err: |
|
413 | except (error.LookupError, error.RepoLookupError) as err: | |
415 | msg = pycompat.bytestr(err) |
|
414 | msg = pycompat.bytestr(err) | |
416 | if (util.safehasattr(err, 'name') and |
|
415 | if (util.safehasattr(err, 'name') and | |
417 | not isinstance(err, error.ManifestLookupError)): |
|
416 | not isinstance(err, error.ManifestLookupError)): | |
418 | msg = 'revision not found: %s' % err.name |
|
417 | msg = 'revision not found: %s' % err.name | |
419 |
|
418 | |||
420 | res.status = '404 Not Found' |
|
419 | res.status = '404 Not Found' | |
421 | res.headers['Content-Type'] = ctype |
|
420 | res.headers['Content-Type'] = ctype | |
422 | return rctx.sendtemplate('error', error=msg) |
|
421 | return rctx.sendtemplate('error', error=msg) | |
423 | except (error.RepoError, error.RevlogError) as e: |
|
422 | except (error.RepoError, error.RevlogError) as e: | |
424 | res.status = '500 Internal Server Error' |
|
423 | res.status = '500 Internal Server Error' | |
425 | res.headers['Content-Type'] = ctype |
|
424 | res.headers['Content-Type'] = ctype | |
426 | return rctx.sendtemplate('error', error=pycompat.bytestr(e)) |
|
425 | return rctx.sendtemplate('error', error=pycompat.bytestr(e)) | |
427 | except ErrorResponse as e: |
|
426 | except ErrorResponse as e: | |
428 | res.status = statusmessage(e.code, pycompat.bytestr(e)) |
|
427 | res.status = statusmessage(e.code, pycompat.bytestr(e)) | |
429 | res.headers['Content-Type'] = ctype |
|
428 | res.headers['Content-Type'] = ctype | |
430 | return rctx.sendtemplate('error', error=pycompat.bytestr(e)) |
|
429 | return rctx.sendtemplate('error', error=pycompat.bytestr(e)) | |
431 |
|
430 | |||
432 | def check_perm(self, rctx, req, op): |
|
431 | def check_perm(self, rctx, req, op): | |
433 | for permhook in permhooks: |
|
432 | for permhook in permhooks: | |
434 | permhook(rctx, req, op) |
|
433 | permhook(rctx, req, op) | |
435 |
|
434 | |||
436 | def getwebview(repo): |
|
435 | def getwebview(repo): | |
437 | """The 'web.view' config controls changeset filter to hgweb. Possible |
|
436 | """The 'web.view' config controls changeset filter to hgweb. Possible | |
438 | values are ``served``, ``visible`` and ``all``. Default is ``served``. |
|
437 | values are ``served``, ``visible`` and ``all``. Default is ``served``. | |
439 | The ``served`` filter only shows changesets that can be pulled from the |
|
438 | The ``served`` filter only shows changesets that can be pulled from the | |
440 | hgweb instance. The``visible`` filter includes secret changesets but |
|
439 | hgweb instance. The``visible`` filter includes secret changesets but | |
441 | still excludes "hidden" one. |
|
440 | still excludes "hidden" one. | |
442 |
|
441 | |||
443 | See the repoview module for details. |
|
442 | See the repoview module for details. | |
444 |
|
443 | |||
445 | The option has been around undocumented since Mercurial 2.5, but no |
|
444 | The option has been around undocumented since Mercurial 2.5, but no | |
446 | user ever asked about it. So we better keep it undocumented for now.""" |
|
445 | user ever asked about it. So we better keep it undocumented for now.""" | |
447 | # experimental config: web.view |
|
446 | # experimental config: web.view | |
448 | viewconfig = repo.ui.config('web', 'view', untrusted=True) |
|
447 | viewconfig = repo.ui.config('web', 'view', untrusted=True) | |
449 | if viewconfig == 'all': |
|
448 | if viewconfig == 'all': | |
450 | return repo.unfiltered() |
|
449 | return repo.unfiltered() | |
451 | elif viewconfig in repoview.filtertable: |
|
450 | elif viewconfig in repoview.filtertable: | |
452 | return repo.filtered(viewconfig) |
|
451 | return repo.filtered(viewconfig) | |
453 | else: |
|
452 | else: | |
454 | return repo.filtered('served') |
|
453 | return repo.filtered('served') |
@@ -1,538 +1,536 b'' | |||||
1 | # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories. |
|
1 | # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories. | |
2 | # |
|
2 | # | |
3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> | |
4 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
4 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> | |
5 | # |
|
5 | # | |
6 | # This software may be used and distributed according to the terms of the |
|
6 | # This software may be used and distributed according to the terms of the | |
7 | # GNU General Public License version 2 or any later version. |
|
7 | # GNU General Public License version 2 or any later version. | |
8 |
|
8 | |||
9 | from __future__ import absolute_import |
|
9 | from __future__ import absolute_import | |
10 |
|
10 | |||
11 | import gc |
|
11 | import gc | |
12 | import os |
|
12 | import os | |
13 | import time |
|
13 | import time | |
14 |
|
14 | |||
15 | from ..i18n import _ |
|
15 | from ..i18n import _ | |
16 |
|
16 | |||
17 | from .common import ( |
|
17 | from .common import ( | |
18 | ErrorResponse, |
|
18 | ErrorResponse, | |
19 | HTTP_SERVER_ERROR, |
|
19 | HTTP_SERVER_ERROR, | |
20 | cspvalues, |
|
20 | cspvalues, | |
21 | get_contact, |
|
21 | get_contact, | |
22 | get_mtime, |
|
22 | get_mtime, | |
23 | ismember, |
|
23 | ismember, | |
24 | paritygen, |
|
24 | paritygen, | |
25 | staticfile, |
|
25 | staticfile, | |
26 | statusmessage, |
|
26 | statusmessage, | |
27 | ) |
|
27 | ) | |
28 |
|
28 | |||
29 | from .. import ( |
|
29 | from .. import ( | |
30 | configitems, |
|
30 | configitems, | |
31 | encoding, |
|
31 | encoding, | |
32 | error, |
|
32 | error, | |
33 | hg, |
|
33 | hg, | |
34 | profiling, |
|
34 | profiling, | |
35 | pycompat, |
|
35 | pycompat, | |
36 | scmutil, |
|
36 | scmutil, | |
37 | templater, |
|
37 | templater, | |
38 | templateutil, |
|
|||
39 | ui as uimod, |
|
38 | ui as uimod, | |
40 | util, |
|
39 | util, | |
41 | ) |
|
40 | ) | |
42 |
|
41 | |||
43 | from . import ( |
|
42 | from . import ( | |
44 | hgweb_mod, |
|
43 | hgweb_mod, | |
45 | request as requestmod, |
|
44 | request as requestmod, | |
46 | webutil, |
|
45 | webutil, | |
47 | wsgicgi, |
|
46 | wsgicgi, | |
48 | ) |
|
47 | ) | |
49 | from ..utils import dateutil |
|
48 | from ..utils import dateutil | |
50 |
|
49 | |||
51 | def cleannames(items): |
|
50 | def cleannames(items): | |
52 | return [(util.pconvert(name).strip('/'), path) for name, path in items] |
|
51 | return [(util.pconvert(name).strip('/'), path) for name, path in items] | |
53 |
|
52 | |||
54 | def findrepos(paths): |
|
53 | def findrepos(paths): | |
55 | repos = [] |
|
54 | repos = [] | |
56 | for prefix, root in cleannames(paths): |
|
55 | for prefix, root in cleannames(paths): | |
57 | roothead, roottail = os.path.split(root) |
|
56 | roothead, roottail = os.path.split(root) | |
58 | # "foo = /bar/*" or "foo = /bar/**" lets every repo /bar/N in or below |
|
57 | # "foo = /bar/*" or "foo = /bar/**" lets every repo /bar/N in or below | |
59 | # /bar/ be served as as foo/N . |
|
58 | # /bar/ be served as as foo/N . | |
60 | # '*' will not search inside dirs with .hg (except .hg/patches), |
|
59 | # '*' will not search inside dirs with .hg (except .hg/patches), | |
61 | # '**' will search inside dirs with .hg (and thus also find subrepos). |
|
60 | # '**' will search inside dirs with .hg (and thus also find subrepos). | |
62 | try: |
|
61 | try: | |
63 | recurse = {'*': False, '**': True}[roottail] |
|
62 | recurse = {'*': False, '**': True}[roottail] | |
64 | except KeyError: |
|
63 | except KeyError: | |
65 | repos.append((prefix, root)) |
|
64 | repos.append((prefix, root)) | |
66 | continue |
|
65 | continue | |
67 | roothead = os.path.normpath(os.path.abspath(roothead)) |
|
66 | roothead = os.path.normpath(os.path.abspath(roothead)) | |
68 | paths = scmutil.walkrepos(roothead, followsym=True, recurse=recurse) |
|
67 | paths = scmutil.walkrepos(roothead, followsym=True, recurse=recurse) | |
69 | repos.extend(urlrepos(prefix, roothead, paths)) |
|
68 | repos.extend(urlrepos(prefix, roothead, paths)) | |
70 | return repos |
|
69 | return repos | |
71 |
|
70 | |||
72 | def urlrepos(prefix, roothead, paths): |
|
71 | def urlrepos(prefix, roothead, paths): | |
73 | """yield url paths and filesystem paths from a list of repo paths |
|
72 | """yield url paths and filesystem paths from a list of repo paths | |
74 |
|
73 | |||
75 | >>> conv = lambda seq: [(v, util.pconvert(p)) for v,p in seq] |
|
74 | >>> conv = lambda seq: [(v, util.pconvert(p)) for v,p in seq] | |
76 | >>> conv(urlrepos(b'hg', b'/opt', [b'/opt/r', b'/opt/r/r', b'/opt'])) |
|
75 | >>> conv(urlrepos(b'hg', b'/opt', [b'/opt/r', b'/opt/r/r', b'/opt'])) | |
77 | [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')] |
|
76 | [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')] | |
78 | >>> conv(urlrepos(b'', b'/opt', [b'/opt/r', b'/opt/r/r', b'/opt'])) |
|
77 | >>> conv(urlrepos(b'', b'/opt', [b'/opt/r', b'/opt/r/r', b'/opt'])) | |
79 | [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')] |
|
78 | [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')] | |
80 | """ |
|
79 | """ | |
81 | for path in paths: |
|
80 | for path in paths: | |
82 | path = os.path.normpath(path) |
|
81 | path = os.path.normpath(path) | |
83 | yield (prefix + '/' + |
|
82 | yield (prefix + '/' + | |
84 | util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path |
|
83 | util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path | |
85 |
|
84 | |||
86 | def readallowed(ui, req): |
|
85 | def readallowed(ui, req): | |
87 | """Check allow_read and deny_read config options of a repo's ui object |
|
86 | """Check allow_read and deny_read config options of a repo's ui object | |
88 | to determine user permissions. By default, with neither option set (or |
|
87 | to determine user permissions. By default, with neither option set (or | |
89 | both empty), allow all users to read the repo. There are two ways a |
|
88 | both empty), allow all users to read the repo. There are two ways a | |
90 | user can be denied read access: (1) deny_read is not empty, and the |
|
89 | user can be denied read access: (1) deny_read is not empty, and the | |
91 | user is unauthenticated or deny_read contains user (or *), and (2) |
|
90 | user is unauthenticated or deny_read contains user (or *), and (2) | |
92 | allow_read is not empty and the user is not in allow_read. Return True |
|
91 | allow_read is not empty and the user is not in allow_read. Return True | |
93 | if user is allowed to read the repo, else return False.""" |
|
92 | if user is allowed to read the repo, else return False.""" | |
94 |
|
93 | |||
95 | user = req.remoteuser |
|
94 | user = req.remoteuser | |
96 |
|
95 | |||
97 | deny_read = ui.configlist('web', 'deny_read', untrusted=True) |
|
96 | deny_read = ui.configlist('web', 'deny_read', untrusted=True) | |
98 | if deny_read and (not user or ismember(ui, user, deny_read)): |
|
97 | if deny_read and (not user or ismember(ui, user, deny_read)): | |
99 | return False |
|
98 | return False | |
100 |
|
99 | |||
101 | allow_read = ui.configlist('web', 'allow_read', untrusted=True) |
|
100 | allow_read = ui.configlist('web', 'allow_read', untrusted=True) | |
102 | # by default, allow reading if no allow_read option has been set |
|
101 | # by default, allow reading if no allow_read option has been set | |
103 | if not allow_read or ismember(ui, user, allow_read): |
|
102 | if not allow_read or ismember(ui, user, allow_read): | |
104 | return True |
|
103 | return True | |
105 |
|
104 | |||
106 | return False |
|
105 | return False | |
107 |
|
106 | |||
108 | def archivelist(ui, nodeid, url): |
|
107 | def archivelist(ui, nodeid, url): | |
109 | allowed = ui.configlist('web', 'allow_archive', untrusted=True) |
|
108 | allowed = ui.configlist('web', 'allow_archive', untrusted=True) | |
110 | archives = [] |
|
109 | archives = [] | |
111 |
|
110 | |||
112 | for typ, spec in hgweb_mod.archivespecs.iteritems(): |
|
111 | for typ, spec in hgweb_mod.archivespecs.iteritems(): | |
113 | if typ in allowed or ui.configbool('web', 'allow' + typ, |
|
112 | if typ in allowed or ui.configbool('web', 'allow' + typ, | |
114 | untrusted=True): |
|
113 | untrusted=True): | |
115 | archives.append({ |
|
114 | archives.append({ | |
116 | 'type': typ, |
|
115 | 'type': typ, | |
117 | 'extension': spec[2], |
|
116 | 'extension': spec[2], | |
118 | 'node': nodeid, |
|
117 | 'node': nodeid, | |
119 | 'url': url, |
|
118 | 'url': url, | |
120 | }) |
|
119 | }) | |
121 |
|
120 | |||
122 | return archives |
|
121 | return archives | |
123 |
|
122 | |||
124 | def rawindexentries(ui, repos, req, subdir=''): |
|
123 | def rawindexentries(ui, repos, req, subdir=''): | |
125 | descend = ui.configbool('web', 'descend') |
|
124 | descend = ui.configbool('web', 'descend') | |
126 | collapse = ui.configbool('web', 'collapse') |
|
125 | collapse = ui.configbool('web', 'collapse') | |
127 | seenrepos = set() |
|
126 | seenrepos = set() | |
128 | seendirs = set() |
|
127 | seendirs = set() | |
129 | for name, path in repos: |
|
128 | for name, path in repos: | |
130 |
|
129 | |||
131 | if not name.startswith(subdir): |
|
130 | if not name.startswith(subdir): | |
132 | continue |
|
131 | continue | |
133 | name = name[len(subdir):] |
|
132 | name = name[len(subdir):] | |
134 | directory = False |
|
133 | directory = False | |
135 |
|
134 | |||
136 | if '/' in name: |
|
135 | if '/' in name: | |
137 | if not descend: |
|
136 | if not descend: | |
138 | continue |
|
137 | continue | |
139 |
|
138 | |||
140 | nameparts = name.split('/') |
|
139 | nameparts = name.split('/') | |
141 | rootname = nameparts[0] |
|
140 | rootname = nameparts[0] | |
142 |
|
141 | |||
143 | if not collapse: |
|
142 | if not collapse: | |
144 | pass |
|
143 | pass | |
145 | elif rootname in seendirs: |
|
144 | elif rootname in seendirs: | |
146 | continue |
|
145 | continue | |
147 | elif rootname in seenrepos: |
|
146 | elif rootname in seenrepos: | |
148 | pass |
|
147 | pass | |
149 | else: |
|
148 | else: | |
150 | directory = True |
|
149 | directory = True | |
151 | name = rootname |
|
150 | name = rootname | |
152 |
|
151 | |||
153 | # redefine the path to refer to the directory |
|
152 | # redefine the path to refer to the directory | |
154 | discarded = '/'.join(nameparts[1:]) |
|
153 | discarded = '/'.join(nameparts[1:]) | |
155 |
|
154 | |||
156 | # remove name parts plus accompanying slash |
|
155 | # remove name parts plus accompanying slash | |
157 | path = path[:-len(discarded) - 1] |
|
156 | path = path[:-len(discarded) - 1] | |
158 |
|
157 | |||
159 | try: |
|
158 | try: | |
160 | r = hg.repository(ui, path) |
|
159 | r = hg.repository(ui, path) | |
161 | directory = False |
|
160 | directory = False | |
162 | except (IOError, error.RepoError): |
|
161 | except (IOError, error.RepoError): | |
163 | pass |
|
162 | pass | |
164 |
|
163 | |||
165 | parts = [ |
|
164 | parts = [ | |
166 | req.apppath.strip('/'), |
|
165 | req.apppath.strip('/'), | |
167 | subdir.strip('/'), |
|
166 | subdir.strip('/'), | |
168 | name.strip('/'), |
|
167 | name.strip('/'), | |
169 | ] |
|
168 | ] | |
170 | url = '/' + '/'.join(p for p in parts if p) + '/' |
|
169 | url = '/' + '/'.join(p for p in parts if p) + '/' | |
171 |
|
170 | |||
172 | # show either a directory entry or a repository |
|
171 | # show either a directory entry or a repository | |
173 | if directory: |
|
172 | if directory: | |
174 | # get the directory's time information |
|
173 | # get the directory's time information | |
175 | try: |
|
174 | try: | |
176 | d = (get_mtime(path), dateutil.makedate()[1]) |
|
175 | d = (get_mtime(path), dateutil.makedate()[1]) | |
177 | except OSError: |
|
176 | except OSError: | |
178 | continue |
|
177 | continue | |
179 |
|
178 | |||
180 | # add '/' to the name to make it obvious that |
|
179 | # add '/' to the name to make it obvious that | |
181 | # the entry is a directory, not a regular repository |
|
180 | # the entry is a directory, not a regular repository | |
182 | row = {'contact': "", |
|
181 | row = {'contact': "", | |
183 | 'contact_sort': "", |
|
182 | 'contact_sort': "", | |
184 | 'name': name + '/', |
|
183 | 'name': name + '/', | |
185 | 'name_sort': name, |
|
184 | 'name_sort': name, | |
186 | 'url': url, |
|
185 | 'url': url, | |
187 | 'description': "", |
|
186 | 'description': "", | |
188 | 'description_sort': "", |
|
187 | 'description_sort': "", | |
189 | 'lastchange': d, |
|
188 | 'lastchange': d, | |
190 | 'lastchange_sort': d[1] - d[0], |
|
189 | 'lastchange_sort': d[1] - d[0], | |
191 | 'archives': [], |
|
190 | 'archives': [], | |
192 | 'isdirectory': True, |
|
191 | 'isdirectory': True, | |
193 | 'labels': [], |
|
192 | 'labels': [], | |
194 | } |
|
193 | } | |
195 |
|
194 | |||
196 | seendirs.add(name) |
|
195 | seendirs.add(name) | |
197 | yield row |
|
196 | yield row | |
198 | continue |
|
197 | continue | |
199 |
|
198 | |||
200 | u = ui.copy() |
|
199 | u = ui.copy() | |
201 | try: |
|
200 | try: | |
202 | u.readconfig(os.path.join(path, '.hg', 'hgrc')) |
|
201 | u.readconfig(os.path.join(path, '.hg', 'hgrc')) | |
203 | except Exception as e: |
|
202 | except Exception as e: | |
204 | u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e)) |
|
203 | u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e)) | |
205 | continue |
|
204 | continue | |
206 |
|
205 | |||
207 | def get(section, name, default=uimod._unset): |
|
206 | def get(section, name, default=uimod._unset): | |
208 | return u.config(section, name, default, untrusted=True) |
|
207 | return u.config(section, name, default, untrusted=True) | |
209 |
|
208 | |||
210 | if u.configbool("web", "hidden", untrusted=True): |
|
209 | if u.configbool("web", "hidden", untrusted=True): | |
211 | continue |
|
210 | continue | |
212 |
|
211 | |||
213 | if not readallowed(u, req): |
|
212 | if not readallowed(u, req): | |
214 | continue |
|
213 | continue | |
215 |
|
214 | |||
216 | # update time with local timezone |
|
215 | # update time with local timezone | |
217 | try: |
|
216 | try: | |
218 | r = hg.repository(ui, path) |
|
217 | r = hg.repository(ui, path) | |
219 | except IOError: |
|
218 | except IOError: | |
220 | u.warn(_('error accessing repository at %s\n') % path) |
|
219 | u.warn(_('error accessing repository at %s\n') % path) | |
221 | continue |
|
220 | continue | |
222 | except error.RepoError: |
|
221 | except error.RepoError: | |
223 | u.warn(_('error accessing repository at %s\n') % path) |
|
222 | u.warn(_('error accessing repository at %s\n') % path) | |
224 | continue |
|
223 | continue | |
225 | try: |
|
224 | try: | |
226 | d = (get_mtime(r.spath), dateutil.makedate()[1]) |
|
225 | d = (get_mtime(r.spath), dateutil.makedate()[1]) | |
227 | except OSError: |
|
226 | except OSError: | |
228 | continue |
|
227 | continue | |
229 |
|
228 | |||
230 | contact = get_contact(get) |
|
229 | contact = get_contact(get) | |
231 | description = get("web", "description") |
|
230 | description = get("web", "description") | |
232 | seenrepos.add(name) |
|
231 | seenrepos.add(name) | |
233 | name = get("web", "name", name) |
|
232 | name = get("web", "name", name) | |
234 | row = {'contact': contact or "unknown", |
|
233 | row = {'contact': contact or "unknown", | |
235 | 'contact_sort': contact.upper() or "unknown", |
|
234 | 'contact_sort': contact.upper() or "unknown", | |
236 | 'name': name, |
|
235 | 'name': name, | |
237 | 'name_sort': name, |
|
236 | 'name_sort': name, | |
238 | 'url': url, |
|
237 | 'url': url, | |
239 | 'description': description or "unknown", |
|
238 | 'description': description or "unknown", | |
240 | 'description_sort': description.upper() or "unknown", |
|
239 | 'description_sort': description.upper() or "unknown", | |
241 | 'lastchange': d, |
|
240 | 'lastchange': d, | |
242 | 'lastchange_sort': d[1] - d[0], |
|
241 | 'lastchange_sort': d[1] - d[0], | |
243 | 'archives': archivelist(u, "tip", url), |
|
242 | 'archives': archivelist(u, "tip", url), | |
244 | 'isdirectory': None, |
|
243 | 'isdirectory': None, | |
245 | 'labels': u.configlist('web', 'labels', untrusted=True), |
|
244 | 'labels': u.configlist('web', 'labels', untrusted=True), | |
246 | } |
|
245 | } | |
247 |
|
246 | |||
248 | yield row |
|
247 | yield row | |
249 |
|
248 | |||
250 | def indexentries(ui, repos, req, stripecount, sortcolumn='', |
|
249 | def indexentries(ui, repos, req, stripecount, sortcolumn='', | |
251 | descending=False, subdir=''): |
|
250 | descending=False, subdir=''): | |
252 |
|
251 | |||
253 | rows = rawindexentries(ui, repos, req, subdir=subdir) |
|
252 | rows = rawindexentries(ui, repos, req, subdir=subdir) | |
254 |
|
253 | |||
255 | sortdefault = None, False |
|
254 | sortdefault = None, False | |
256 |
|
255 | |||
257 | if sortcolumn and sortdefault != (sortcolumn, descending): |
|
256 | if sortcolumn and sortdefault != (sortcolumn, descending): | |
258 | sortkey = '%s_sort' % sortcolumn |
|
257 | sortkey = '%s_sort' % sortcolumn | |
259 | rows = sorted(rows, key=lambda x: x[sortkey], |
|
258 | rows = sorted(rows, key=lambda x: x[sortkey], | |
260 | reverse=descending) |
|
259 | reverse=descending) | |
261 |
|
260 | |||
262 | for row, parity in zip(rows, paritygen(stripecount)): |
|
261 | for row, parity in zip(rows, paritygen(stripecount)): | |
263 | row['parity'] = parity |
|
262 | row['parity'] = parity | |
264 | yield row |
|
263 | yield row | |
265 |
|
264 | |||
266 | class hgwebdir(object): |
|
265 | class hgwebdir(object): | |
267 | """HTTP server for multiple repositories. |
|
266 | """HTTP server for multiple repositories. | |
268 |
|
267 | |||
269 | Given a configuration, different repositories will be served depending |
|
268 | Given a configuration, different repositories will be served depending | |
270 | on the request path. |
|
269 | on the request path. | |
271 |
|
270 | |||
272 | Instances are typically used as WSGI applications. |
|
271 | Instances are typically used as WSGI applications. | |
273 | """ |
|
272 | """ | |
274 | def __init__(self, conf, baseui=None): |
|
273 | def __init__(self, conf, baseui=None): | |
275 | self.conf = conf |
|
274 | self.conf = conf | |
276 | self.baseui = baseui |
|
275 | self.baseui = baseui | |
277 | self.ui = None |
|
276 | self.ui = None | |
278 | self.lastrefresh = 0 |
|
277 | self.lastrefresh = 0 | |
279 | self.motd = None |
|
278 | self.motd = None | |
280 | self.refresh() |
|
279 | self.refresh() | |
281 |
|
280 | |||
282 | def refresh(self): |
|
281 | def refresh(self): | |
283 | if self.ui: |
|
282 | if self.ui: | |
284 | refreshinterval = self.ui.configint('web', 'refreshinterval') |
|
283 | refreshinterval = self.ui.configint('web', 'refreshinterval') | |
285 | else: |
|
284 | else: | |
286 | item = configitems.coreitems['web']['refreshinterval'] |
|
285 | item = configitems.coreitems['web']['refreshinterval'] | |
287 | refreshinterval = item.default |
|
286 | refreshinterval = item.default | |
288 |
|
287 | |||
289 | # refreshinterval <= 0 means to always refresh. |
|
288 | # refreshinterval <= 0 means to always refresh. | |
290 | if (refreshinterval > 0 and |
|
289 | if (refreshinterval > 0 and | |
291 | self.lastrefresh + refreshinterval > time.time()): |
|
290 | self.lastrefresh + refreshinterval > time.time()): | |
292 | return |
|
291 | return | |
293 |
|
292 | |||
294 | if self.baseui: |
|
293 | if self.baseui: | |
295 | u = self.baseui.copy() |
|
294 | u = self.baseui.copy() | |
296 | else: |
|
295 | else: | |
297 | u = uimod.ui.load() |
|
296 | u = uimod.ui.load() | |
298 | u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir') |
|
297 | u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir') | |
299 | u.setconfig('ui', 'nontty', 'true', 'hgwebdir') |
|
298 | u.setconfig('ui', 'nontty', 'true', 'hgwebdir') | |
300 | # displaying bundling progress bar while serving feels wrong and may |
|
299 | # displaying bundling progress bar while serving feels wrong and may | |
301 | # break some wsgi implementations. |
|
300 | # break some wsgi implementations. | |
302 | u.setconfig('progress', 'disable', 'true', 'hgweb') |
|
301 | u.setconfig('progress', 'disable', 'true', 'hgweb') | |
303 |
|
302 | |||
304 | if not isinstance(self.conf, (dict, list, tuple)): |
|
303 | if not isinstance(self.conf, (dict, list, tuple)): | |
305 | map = {'paths': 'hgweb-paths'} |
|
304 | map = {'paths': 'hgweb-paths'} | |
306 | if not os.path.exists(self.conf): |
|
305 | if not os.path.exists(self.conf): | |
307 | raise error.Abort(_('config file %s not found!') % self.conf) |
|
306 | raise error.Abort(_('config file %s not found!') % self.conf) | |
308 | u.readconfig(self.conf, remap=map, trust=True) |
|
307 | u.readconfig(self.conf, remap=map, trust=True) | |
309 | paths = [] |
|
308 | paths = [] | |
310 | for name, ignored in u.configitems('hgweb-paths'): |
|
309 | for name, ignored in u.configitems('hgweb-paths'): | |
311 | for path in u.configlist('hgweb-paths', name): |
|
310 | for path in u.configlist('hgweb-paths', name): | |
312 | paths.append((name, path)) |
|
311 | paths.append((name, path)) | |
313 | elif isinstance(self.conf, (list, tuple)): |
|
312 | elif isinstance(self.conf, (list, tuple)): | |
314 | paths = self.conf |
|
313 | paths = self.conf | |
315 | elif isinstance(self.conf, dict): |
|
314 | elif isinstance(self.conf, dict): | |
316 | paths = self.conf.items() |
|
315 | paths = self.conf.items() | |
317 |
|
316 | |||
318 | repos = findrepos(paths) |
|
317 | repos = findrepos(paths) | |
319 | for prefix, root in u.configitems('collections'): |
|
318 | for prefix, root in u.configitems('collections'): | |
320 | prefix = util.pconvert(prefix) |
|
319 | prefix = util.pconvert(prefix) | |
321 | for path in scmutil.walkrepos(root, followsym=True): |
|
320 | for path in scmutil.walkrepos(root, followsym=True): | |
322 | repo = os.path.normpath(path) |
|
321 | repo = os.path.normpath(path) | |
323 | name = util.pconvert(repo) |
|
322 | name = util.pconvert(repo) | |
324 | if name.startswith(prefix): |
|
323 | if name.startswith(prefix): | |
325 | name = name[len(prefix):] |
|
324 | name = name[len(prefix):] | |
326 | repos.append((name.lstrip('/'), repo)) |
|
325 | repos.append((name.lstrip('/'), repo)) | |
327 |
|
326 | |||
328 | self.repos = repos |
|
327 | self.repos = repos | |
329 | self.ui = u |
|
328 | self.ui = u | |
330 | encoding.encoding = self.ui.config('web', 'encoding') |
|
329 | encoding.encoding = self.ui.config('web', 'encoding') | |
331 | self.style = self.ui.config('web', 'style') |
|
330 | self.style = self.ui.config('web', 'style') | |
332 | self.templatepath = self.ui.config('web', 'templates', untrusted=False) |
|
331 | self.templatepath = self.ui.config('web', 'templates', untrusted=False) | |
333 | self.stripecount = self.ui.config('web', 'stripes') |
|
332 | self.stripecount = self.ui.config('web', 'stripes') | |
334 | if self.stripecount: |
|
333 | if self.stripecount: | |
335 | self.stripecount = int(self.stripecount) |
|
334 | self.stripecount = int(self.stripecount) | |
336 | prefix = self.ui.config('web', 'prefix') |
|
335 | prefix = self.ui.config('web', 'prefix') | |
337 | if prefix.startswith('/'): |
|
336 | if prefix.startswith('/'): | |
338 | prefix = prefix[1:] |
|
337 | prefix = prefix[1:] | |
339 | if prefix.endswith('/'): |
|
338 | if prefix.endswith('/'): | |
340 | prefix = prefix[:-1] |
|
339 | prefix = prefix[:-1] | |
341 | self.prefix = prefix |
|
340 | self.prefix = prefix | |
342 | self.lastrefresh = time.time() |
|
341 | self.lastrefresh = time.time() | |
343 |
|
342 | |||
344 | def run(self): |
|
343 | def run(self): | |
345 | if not encoding.environ.get('GATEWAY_INTERFACE', |
|
344 | if not encoding.environ.get('GATEWAY_INTERFACE', | |
346 | '').startswith("CGI/1."): |
|
345 | '').startswith("CGI/1."): | |
347 | raise RuntimeError("This function is only intended to be " |
|
346 | raise RuntimeError("This function is only intended to be " | |
348 | "called while running as a CGI script.") |
|
347 | "called while running as a CGI script.") | |
349 | wsgicgi.launch(self) |
|
348 | wsgicgi.launch(self) | |
350 |
|
349 | |||
351 | def __call__(self, env, respond): |
|
350 | def __call__(self, env, respond): | |
352 | baseurl = self.ui.config('web', 'baseurl') |
|
351 | baseurl = self.ui.config('web', 'baseurl') | |
353 | req = requestmod.parserequestfromenv(env, altbaseurl=baseurl) |
|
352 | req = requestmod.parserequestfromenv(env, altbaseurl=baseurl) | |
354 | res = requestmod.wsgiresponse(req, respond) |
|
353 | res = requestmod.wsgiresponse(req, respond) | |
355 |
|
354 | |||
356 | return self.run_wsgi(req, res) |
|
355 | return self.run_wsgi(req, res) | |
357 |
|
356 | |||
358 | def run_wsgi(self, req, res): |
|
357 | def run_wsgi(self, req, res): | |
359 | profile = self.ui.configbool('profiling', 'enabled') |
|
358 | profile = self.ui.configbool('profiling', 'enabled') | |
360 | with profiling.profile(self.ui, enabled=profile): |
|
359 | with profiling.profile(self.ui, enabled=profile): | |
361 | try: |
|
360 | try: | |
362 | for r in self._runwsgi(req, res): |
|
361 | for r in self._runwsgi(req, res): | |
363 | yield r |
|
362 | yield r | |
364 | finally: |
|
363 | finally: | |
365 | # There are known cycles in localrepository that prevent |
|
364 | # There are known cycles in localrepository that prevent | |
366 | # those objects (and tons of held references) from being |
|
365 | # those objects (and tons of held references) from being | |
367 | # collected through normal refcounting. We mitigate those |
|
366 | # collected through normal refcounting. We mitigate those | |
368 | # leaks by performing an explicit GC on every request. |
|
367 | # leaks by performing an explicit GC on every request. | |
369 | # TODO remove this once leaks are fixed. |
|
368 | # TODO remove this once leaks are fixed. | |
370 | # TODO only run this on requests that create localrepository |
|
369 | # TODO only run this on requests that create localrepository | |
371 | # instances instead of every request. |
|
370 | # instances instead of every request. | |
372 | gc.collect() |
|
371 | gc.collect() | |
373 |
|
372 | |||
374 | def _runwsgi(self, req, res): |
|
373 | def _runwsgi(self, req, res): | |
375 | try: |
|
374 | try: | |
376 | self.refresh() |
|
375 | self.refresh() | |
377 |
|
376 | |||
378 | csp, nonce = cspvalues(self.ui) |
|
377 | csp, nonce = cspvalues(self.ui) | |
379 | if csp: |
|
378 | if csp: | |
380 | res.headers['Content-Security-Policy'] = csp |
|
379 | res.headers['Content-Security-Policy'] = csp | |
381 |
|
380 | |||
382 | virtual = req.dispatchpath.strip('/') |
|
381 | virtual = req.dispatchpath.strip('/') | |
383 | tmpl = self.templater(req, nonce) |
|
382 | tmpl = self.templater(req, nonce) | |
384 |
ctype = tmpl('mimetype', encoding |
|
383 | ctype = tmpl.render('mimetype', {'encoding': encoding.encoding}) | |
385 | ctype = templateutil.stringify(ctype) |
|
|||
386 |
|
384 | |||
387 | # Global defaults. These can be overridden by any handler. |
|
385 | # Global defaults. These can be overridden by any handler. | |
388 | res.status = '200 Script output follows' |
|
386 | res.status = '200 Script output follows' | |
389 | res.headers['Content-Type'] = ctype |
|
387 | res.headers['Content-Type'] = ctype | |
390 |
|
388 | |||
391 | # a static file |
|
389 | # a static file | |
392 | if virtual.startswith('static/') or 'static' in req.qsparams: |
|
390 | if virtual.startswith('static/') or 'static' in req.qsparams: | |
393 | if virtual.startswith('static/'): |
|
391 | if virtual.startswith('static/'): | |
394 | fname = virtual[7:] |
|
392 | fname = virtual[7:] | |
395 | else: |
|
393 | else: | |
396 | fname = req.qsparams['static'] |
|
394 | fname = req.qsparams['static'] | |
397 | static = self.ui.config("web", "static", None, |
|
395 | static = self.ui.config("web", "static", None, | |
398 | untrusted=False) |
|
396 | untrusted=False) | |
399 | if not static: |
|
397 | if not static: | |
400 | tp = self.templatepath or templater.templatepaths() |
|
398 | tp = self.templatepath or templater.templatepaths() | |
401 | if isinstance(tp, str): |
|
399 | if isinstance(tp, str): | |
402 | tp = [tp] |
|
400 | tp = [tp] | |
403 | static = [os.path.join(p, 'static') for p in tp] |
|
401 | static = [os.path.join(p, 'static') for p in tp] | |
404 |
|
402 | |||
405 | staticfile(static, fname, res) |
|
403 | staticfile(static, fname, res) | |
406 | return res.sendresponse() |
|
404 | return res.sendresponse() | |
407 |
|
405 | |||
408 | # top-level index |
|
406 | # top-level index | |
409 |
|
407 | |||
410 | repos = dict(self.repos) |
|
408 | repos = dict(self.repos) | |
411 |
|
409 | |||
412 | if (not virtual or virtual == 'index') and virtual not in repos: |
|
410 | if (not virtual or virtual == 'index') and virtual not in repos: | |
413 | return self.makeindex(req, res, tmpl) |
|
411 | return self.makeindex(req, res, tmpl) | |
414 |
|
412 | |||
415 | # nested indexes and hgwebs |
|
413 | # nested indexes and hgwebs | |
416 |
|
414 | |||
417 | if virtual.endswith('/index') and virtual not in repos: |
|
415 | if virtual.endswith('/index') and virtual not in repos: | |
418 | subdir = virtual[:-len('index')] |
|
416 | subdir = virtual[:-len('index')] | |
419 | if any(r.startswith(subdir) for r in repos): |
|
417 | if any(r.startswith(subdir) for r in repos): | |
420 | return self.makeindex(req, res, tmpl, subdir) |
|
418 | return self.makeindex(req, res, tmpl, subdir) | |
421 |
|
419 | |||
422 | def _virtualdirs(): |
|
420 | def _virtualdirs(): | |
423 | # Check the full virtual path, each parent, and the root ('') |
|
421 | # Check the full virtual path, each parent, and the root ('') | |
424 | if virtual != '': |
|
422 | if virtual != '': | |
425 | yield virtual |
|
423 | yield virtual | |
426 |
|
424 | |||
427 | for p in util.finddirs(virtual): |
|
425 | for p in util.finddirs(virtual): | |
428 | yield p |
|
426 | yield p | |
429 |
|
427 | |||
430 | yield '' |
|
428 | yield '' | |
431 |
|
429 | |||
432 | for virtualrepo in _virtualdirs(): |
|
430 | for virtualrepo in _virtualdirs(): | |
433 | real = repos.get(virtualrepo) |
|
431 | real = repos.get(virtualrepo) | |
434 | if real: |
|
432 | if real: | |
435 | # Re-parse the WSGI environment to take into account our |
|
433 | # Re-parse the WSGI environment to take into account our | |
436 | # repository path component. |
|
434 | # repository path component. | |
437 | req = requestmod.parserequestfromenv( |
|
435 | req = requestmod.parserequestfromenv( | |
438 | req.rawenv, reponame=virtualrepo, |
|
436 | req.rawenv, reponame=virtualrepo, | |
439 | altbaseurl=self.ui.config('web', 'baseurl')) |
|
437 | altbaseurl=self.ui.config('web', 'baseurl')) | |
440 | try: |
|
438 | try: | |
441 | # ensure caller gets private copy of ui |
|
439 | # ensure caller gets private copy of ui | |
442 | repo = hg.repository(self.ui.copy(), real) |
|
440 | repo = hg.repository(self.ui.copy(), real) | |
443 | return hgweb_mod.hgweb(repo).run_wsgi(req, res) |
|
441 | return hgweb_mod.hgweb(repo).run_wsgi(req, res) | |
444 | except IOError as inst: |
|
442 | except IOError as inst: | |
445 | msg = encoding.strtolocal(inst.strerror) |
|
443 | msg = encoding.strtolocal(inst.strerror) | |
446 | raise ErrorResponse(HTTP_SERVER_ERROR, msg) |
|
444 | raise ErrorResponse(HTTP_SERVER_ERROR, msg) | |
447 | except error.RepoError as inst: |
|
445 | except error.RepoError as inst: | |
448 | raise ErrorResponse(HTTP_SERVER_ERROR, bytes(inst)) |
|
446 | raise ErrorResponse(HTTP_SERVER_ERROR, bytes(inst)) | |
449 |
|
447 | |||
450 | # browse subdirectories |
|
448 | # browse subdirectories | |
451 | subdir = virtual + '/' |
|
449 | subdir = virtual + '/' | |
452 | if [r for r in repos if r.startswith(subdir)]: |
|
450 | if [r for r in repos if r.startswith(subdir)]: | |
453 | return self.makeindex(req, res, tmpl, subdir) |
|
451 | return self.makeindex(req, res, tmpl, subdir) | |
454 |
|
452 | |||
455 | # prefixes not found |
|
453 | # prefixes not found | |
456 | res.status = '404 Not Found' |
|
454 | res.status = '404 Not Found' | |
457 | res.setbodygen(tmpl('notfound', repo=virtual)) |
|
455 | res.setbodygen(tmpl('notfound', repo=virtual)) | |
458 | return res.sendresponse() |
|
456 | return res.sendresponse() | |
459 |
|
457 | |||
460 | except ErrorResponse as e: |
|
458 | except ErrorResponse as e: | |
461 | res.status = statusmessage(e.code, pycompat.bytestr(e)) |
|
459 | res.status = statusmessage(e.code, pycompat.bytestr(e)) | |
462 | res.setbodygen(tmpl('error', error=e.message or '')) |
|
460 | res.setbodygen(tmpl('error', error=e.message or '')) | |
463 | return res.sendresponse() |
|
461 | return res.sendresponse() | |
464 | finally: |
|
462 | finally: | |
465 | tmpl = None |
|
463 | tmpl = None | |
466 |
|
464 | |||
467 | def makeindex(self, req, res, tmpl, subdir=""): |
|
465 | def makeindex(self, req, res, tmpl, subdir=""): | |
468 | self.refresh() |
|
466 | self.refresh() | |
469 | sortable = ["name", "description", "contact", "lastchange"] |
|
467 | sortable = ["name", "description", "contact", "lastchange"] | |
470 | sortcolumn, descending = None, False |
|
468 | sortcolumn, descending = None, False | |
471 | if 'sort' in req.qsparams: |
|
469 | if 'sort' in req.qsparams: | |
472 | sortcolumn = req.qsparams['sort'] |
|
470 | sortcolumn = req.qsparams['sort'] | |
473 | descending = sortcolumn.startswith('-') |
|
471 | descending = sortcolumn.startswith('-') | |
474 | if descending: |
|
472 | if descending: | |
475 | sortcolumn = sortcolumn[1:] |
|
473 | sortcolumn = sortcolumn[1:] | |
476 | if sortcolumn not in sortable: |
|
474 | if sortcolumn not in sortable: | |
477 | sortcolumn = "" |
|
475 | sortcolumn = "" | |
478 |
|
476 | |||
479 | sort = [("sort_%s" % column, |
|
477 | sort = [("sort_%s" % column, | |
480 | "%s%s" % ((not descending and column == sortcolumn) |
|
478 | "%s%s" % ((not descending and column == sortcolumn) | |
481 | and "-" or "", column)) |
|
479 | and "-" or "", column)) | |
482 | for column in sortable] |
|
480 | for column in sortable] | |
483 |
|
481 | |||
484 | self.refresh() |
|
482 | self.refresh() | |
485 |
|
483 | |||
486 | entries = indexentries(self.ui, self.repos, req, |
|
484 | entries = indexentries(self.ui, self.repos, req, | |
487 | self.stripecount, sortcolumn=sortcolumn, |
|
485 | self.stripecount, sortcolumn=sortcolumn, | |
488 | descending=descending, subdir=subdir) |
|
486 | descending=descending, subdir=subdir) | |
489 |
|
487 | |||
490 | res.setbodygen(tmpl( |
|
488 | res.setbodygen(tmpl( | |
491 | 'index', |
|
489 | 'index', | |
492 | entries=entries, |
|
490 | entries=entries, | |
493 | subdir=subdir, |
|
491 | subdir=subdir, | |
494 | pathdef=hgweb_mod.makebreadcrumb('/' + subdir, self.prefix), |
|
492 | pathdef=hgweb_mod.makebreadcrumb('/' + subdir, self.prefix), | |
495 | sortcolumn=sortcolumn, |
|
493 | sortcolumn=sortcolumn, | |
496 | descending=descending, |
|
494 | descending=descending, | |
497 | **dict(sort))) |
|
495 | **dict(sort))) | |
498 |
|
496 | |||
499 | return res.sendresponse() |
|
497 | return res.sendresponse() | |
500 |
|
498 | |||
501 | def templater(self, req, nonce): |
|
499 | def templater(self, req, nonce): | |
502 |
|
500 | |||
503 | def motd(**map): |
|
501 | def motd(**map): | |
504 | if self.motd is not None: |
|
502 | if self.motd is not None: | |
505 | yield self.motd |
|
503 | yield self.motd | |
506 | else: |
|
504 | else: | |
507 | yield config('web', 'motd') |
|
505 | yield config('web', 'motd') | |
508 |
|
506 | |||
509 | def config(section, name, default=uimod._unset, untrusted=True): |
|
507 | def config(section, name, default=uimod._unset, untrusted=True): | |
510 | return self.ui.config(section, name, default, untrusted) |
|
508 | return self.ui.config(section, name, default, untrusted) | |
511 |
|
509 | |||
512 | vars = {} |
|
510 | vars = {} | |
513 | styles, (style, mapfile) = hgweb_mod.getstyle(req, config, |
|
511 | styles, (style, mapfile) = hgweb_mod.getstyle(req, config, | |
514 | self.templatepath) |
|
512 | self.templatepath) | |
515 | if style == styles[0]: |
|
513 | if style == styles[0]: | |
516 | vars['style'] = style |
|
514 | vars['style'] = style | |
517 |
|
515 | |||
518 | sessionvars = webutil.sessionvars(vars, r'?') |
|
516 | sessionvars = webutil.sessionvars(vars, r'?') | |
519 | logourl = config('web', 'logourl') |
|
517 | logourl = config('web', 'logourl') | |
520 | logoimg = config('web', 'logoimg') |
|
518 | logoimg = config('web', 'logoimg') | |
521 | staticurl = (config('web', 'staticurl') |
|
519 | staticurl = (config('web', 'staticurl') | |
522 | or req.apppath + '/static/') |
|
520 | or req.apppath + '/static/') | |
523 | if not staticurl.endswith('/'): |
|
521 | if not staticurl.endswith('/'): | |
524 | staticurl += '/' |
|
522 | staticurl += '/' | |
525 |
|
523 | |||
526 | defaults = { |
|
524 | defaults = { | |
527 | "encoding": encoding.encoding, |
|
525 | "encoding": encoding.encoding, | |
528 | "motd": motd, |
|
526 | "motd": motd, | |
529 | "url": req.apppath + '/', |
|
527 | "url": req.apppath + '/', | |
530 | "logourl": logourl, |
|
528 | "logourl": logourl, | |
531 | "logoimg": logoimg, |
|
529 | "logoimg": logoimg, | |
532 | "staticurl": staticurl, |
|
530 | "staticurl": staticurl, | |
533 | "sessionvars": sessionvars, |
|
531 | "sessionvars": sessionvars, | |
534 | "style": style, |
|
532 | "style": style, | |
535 | "nonce": nonce, |
|
533 | "nonce": nonce, | |
536 | } |
|
534 | } | |
537 | tmpl = templater.templater.frommapfile(mapfile, defaults=defaults) |
|
535 | tmpl = templater.templater.frommapfile(mapfile, defaults=defaults) | |
538 | return tmpl |
|
536 | return tmpl |
@@ -1,942 +1,936 b'' | |||||
1 | # logcmdutil.py - utility for log-like commands |
|
1 | # logcmdutil.py - utility for log-like commands | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import itertools |
|
10 | import itertools | |
11 | import os |
|
11 | import os | |
12 |
|
12 | |||
13 | from .i18n import _ |
|
13 | from .i18n import _ | |
14 | from .node import ( |
|
14 | from .node import ( | |
15 | hex, |
|
15 | hex, | |
16 | nullid, |
|
16 | nullid, | |
17 | ) |
|
17 | ) | |
18 |
|
18 | |||
19 | from . import ( |
|
19 | from . import ( | |
20 | dagop, |
|
20 | dagop, | |
21 | encoding, |
|
21 | encoding, | |
22 | error, |
|
22 | error, | |
23 | formatter, |
|
23 | formatter, | |
24 | graphmod, |
|
24 | graphmod, | |
25 | match as matchmod, |
|
25 | match as matchmod, | |
26 | mdiff, |
|
26 | mdiff, | |
27 | patch, |
|
27 | patch, | |
28 | pathutil, |
|
28 | pathutil, | |
29 | pycompat, |
|
29 | pycompat, | |
30 | revset, |
|
30 | revset, | |
31 | revsetlang, |
|
31 | revsetlang, | |
32 | scmutil, |
|
32 | scmutil, | |
33 | smartset, |
|
33 | smartset, | |
34 | templatekw, |
|
34 | templatekw, | |
35 | templater, |
|
35 | templater, | |
36 | templateutil, |
|
|||
37 | util, |
|
36 | util, | |
38 | ) |
|
37 | ) | |
39 | from .utils import dateutil |
|
38 | from .utils import dateutil | |
40 |
|
39 | |||
41 | def getlimit(opts): |
|
40 | def getlimit(opts): | |
42 | """get the log limit according to option -l/--limit""" |
|
41 | """get the log limit according to option -l/--limit""" | |
43 | limit = opts.get('limit') |
|
42 | limit = opts.get('limit') | |
44 | if limit: |
|
43 | if limit: | |
45 | try: |
|
44 | try: | |
46 | limit = int(limit) |
|
45 | limit = int(limit) | |
47 | except ValueError: |
|
46 | except ValueError: | |
48 | raise error.Abort(_('limit must be a positive integer')) |
|
47 | raise error.Abort(_('limit must be a positive integer')) | |
49 | if limit <= 0: |
|
48 | if limit <= 0: | |
50 | raise error.Abort(_('limit must be positive')) |
|
49 | raise error.Abort(_('limit must be positive')) | |
51 | else: |
|
50 | else: | |
52 | limit = None |
|
51 | limit = None | |
53 | return limit |
|
52 | return limit | |
54 |
|
53 | |||
55 | def diffordiffstat(ui, repo, diffopts, node1, node2, match, |
|
54 | def diffordiffstat(ui, repo, diffopts, node1, node2, match, | |
56 | changes=None, stat=False, fp=None, prefix='', |
|
55 | changes=None, stat=False, fp=None, prefix='', | |
57 | root='', listsubrepos=False, hunksfilterfn=None): |
|
56 | root='', listsubrepos=False, hunksfilterfn=None): | |
58 | '''show diff or diffstat.''' |
|
57 | '''show diff or diffstat.''' | |
59 | if root: |
|
58 | if root: | |
60 | relroot = pathutil.canonpath(repo.root, repo.getcwd(), root) |
|
59 | relroot = pathutil.canonpath(repo.root, repo.getcwd(), root) | |
61 | else: |
|
60 | else: | |
62 | relroot = '' |
|
61 | relroot = '' | |
63 | if relroot != '': |
|
62 | if relroot != '': | |
64 | # XXX relative roots currently don't work if the root is within a |
|
63 | # XXX relative roots currently don't work if the root is within a | |
65 | # subrepo |
|
64 | # subrepo | |
66 | uirelroot = match.uipath(relroot) |
|
65 | uirelroot = match.uipath(relroot) | |
67 | relroot += '/' |
|
66 | relroot += '/' | |
68 | for matchroot in match.files(): |
|
67 | for matchroot in match.files(): | |
69 | if not matchroot.startswith(relroot): |
|
68 | if not matchroot.startswith(relroot): | |
70 | ui.warn(_('warning: %s not inside relative root %s\n') % ( |
|
69 | ui.warn(_('warning: %s not inside relative root %s\n') % ( | |
71 | match.uipath(matchroot), uirelroot)) |
|
70 | match.uipath(matchroot), uirelroot)) | |
72 |
|
71 | |||
73 | if stat: |
|
72 | if stat: | |
74 | diffopts = diffopts.copy(context=0, noprefix=False) |
|
73 | diffopts = diffopts.copy(context=0, noprefix=False) | |
75 | width = 80 |
|
74 | width = 80 | |
76 | if not ui.plain(): |
|
75 | if not ui.plain(): | |
77 | width = ui.termwidth() |
|
76 | width = ui.termwidth() | |
78 |
|
77 | |||
79 | chunks = patch.diff(repo, node1, node2, match, changes, opts=diffopts, |
|
78 | chunks = patch.diff(repo, node1, node2, match, changes, opts=diffopts, | |
80 | prefix=prefix, relroot=relroot, |
|
79 | prefix=prefix, relroot=relroot, | |
81 | hunksfilterfn=hunksfilterfn) |
|
80 | hunksfilterfn=hunksfilterfn) | |
82 |
|
81 | |||
83 | if fp is not None or ui.canwritewithoutlabels(): |
|
82 | if fp is not None or ui.canwritewithoutlabels(): | |
84 | out = fp or ui |
|
83 | out = fp or ui | |
85 | if stat: |
|
84 | if stat: | |
86 | chunks = [patch.diffstat(util.iterlines(chunks), width=width)] |
|
85 | chunks = [patch.diffstat(util.iterlines(chunks), width=width)] | |
87 | for chunk in util.filechunkiter(util.chunkbuffer(chunks)): |
|
86 | for chunk in util.filechunkiter(util.chunkbuffer(chunks)): | |
88 | out.write(chunk) |
|
87 | out.write(chunk) | |
89 | else: |
|
88 | else: | |
90 | if stat: |
|
89 | if stat: | |
91 | chunks = patch.diffstatui(util.iterlines(chunks), width=width) |
|
90 | chunks = patch.diffstatui(util.iterlines(chunks), width=width) | |
92 | else: |
|
91 | else: | |
93 | chunks = patch.difflabel(lambda chunks, **kwargs: chunks, chunks, |
|
92 | chunks = patch.difflabel(lambda chunks, **kwargs: chunks, chunks, | |
94 | opts=diffopts) |
|
93 | opts=diffopts) | |
95 | if ui.canbatchlabeledwrites(): |
|
94 | if ui.canbatchlabeledwrites(): | |
96 | def gen(): |
|
95 | def gen(): | |
97 | for chunk, label in chunks: |
|
96 | for chunk, label in chunks: | |
98 | yield ui.label(chunk, label=label) |
|
97 | yield ui.label(chunk, label=label) | |
99 | for chunk in util.filechunkiter(util.chunkbuffer(gen())): |
|
98 | for chunk in util.filechunkiter(util.chunkbuffer(gen())): | |
100 | ui.write(chunk) |
|
99 | ui.write(chunk) | |
101 | else: |
|
100 | else: | |
102 | for chunk, label in chunks: |
|
101 | for chunk, label in chunks: | |
103 | ui.write(chunk, label=label) |
|
102 | ui.write(chunk, label=label) | |
104 |
|
103 | |||
105 | if listsubrepos: |
|
104 | if listsubrepos: | |
106 | ctx1 = repo[node1] |
|
105 | ctx1 = repo[node1] | |
107 | ctx2 = repo[node2] |
|
106 | ctx2 = repo[node2] | |
108 | for subpath, sub in scmutil.itersubrepos(ctx1, ctx2): |
|
107 | for subpath, sub in scmutil.itersubrepos(ctx1, ctx2): | |
109 | tempnode2 = node2 |
|
108 | tempnode2 = node2 | |
110 | try: |
|
109 | try: | |
111 | if node2 is not None: |
|
110 | if node2 is not None: | |
112 | tempnode2 = ctx2.substate[subpath][1] |
|
111 | tempnode2 = ctx2.substate[subpath][1] | |
113 | except KeyError: |
|
112 | except KeyError: | |
114 | # A subrepo that existed in node1 was deleted between node1 and |
|
113 | # A subrepo that existed in node1 was deleted between node1 and | |
115 | # node2 (inclusive). Thus, ctx2's substate won't contain that |
|
114 | # node2 (inclusive). Thus, ctx2's substate won't contain that | |
116 | # subpath. The best we can do is to ignore it. |
|
115 | # subpath. The best we can do is to ignore it. | |
117 | tempnode2 = None |
|
116 | tempnode2 = None | |
118 | submatch = matchmod.subdirmatcher(subpath, match) |
|
117 | submatch = matchmod.subdirmatcher(subpath, match) | |
119 | sub.diff(ui, diffopts, tempnode2, submatch, changes=changes, |
|
118 | sub.diff(ui, diffopts, tempnode2, submatch, changes=changes, | |
120 | stat=stat, fp=fp, prefix=prefix) |
|
119 | stat=stat, fp=fp, prefix=prefix) | |
121 |
|
120 | |||
122 | class changesetdiffer(object): |
|
121 | class changesetdiffer(object): | |
123 | """Generate diff of changeset with pre-configured filtering functions""" |
|
122 | """Generate diff of changeset with pre-configured filtering functions""" | |
124 |
|
123 | |||
125 | def _makefilematcher(self, ctx): |
|
124 | def _makefilematcher(self, ctx): | |
126 | return scmutil.matchall(ctx.repo()) |
|
125 | return scmutil.matchall(ctx.repo()) | |
127 |
|
126 | |||
128 | def _makehunksfilter(self, ctx): |
|
127 | def _makehunksfilter(self, ctx): | |
129 | return None |
|
128 | return None | |
130 |
|
129 | |||
131 | def showdiff(self, ui, ctx, diffopts, stat=False): |
|
130 | def showdiff(self, ui, ctx, diffopts, stat=False): | |
132 | repo = ctx.repo() |
|
131 | repo = ctx.repo() | |
133 | node = ctx.node() |
|
132 | node = ctx.node() | |
134 | prev = ctx.p1().node() |
|
133 | prev = ctx.p1().node() | |
135 | diffordiffstat(ui, repo, diffopts, prev, node, |
|
134 | diffordiffstat(ui, repo, diffopts, prev, node, | |
136 | match=self._makefilematcher(ctx), stat=stat, |
|
135 | match=self._makefilematcher(ctx), stat=stat, | |
137 | hunksfilterfn=self._makehunksfilter(ctx)) |
|
136 | hunksfilterfn=self._makehunksfilter(ctx)) | |
138 |
|
137 | |||
139 | def changesetlabels(ctx): |
|
138 | def changesetlabels(ctx): | |
140 | labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()] |
|
139 | labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()] | |
141 | if ctx.obsolete(): |
|
140 | if ctx.obsolete(): | |
142 | labels.append('changeset.obsolete') |
|
141 | labels.append('changeset.obsolete') | |
143 | if ctx.isunstable(): |
|
142 | if ctx.isunstable(): | |
144 | labels.append('changeset.unstable') |
|
143 | labels.append('changeset.unstable') | |
145 | for instability in ctx.instabilities(): |
|
144 | for instability in ctx.instabilities(): | |
146 | labels.append('instability.%s' % instability) |
|
145 | labels.append('instability.%s' % instability) | |
147 | return ' '.join(labels) |
|
146 | return ' '.join(labels) | |
148 |
|
147 | |||
149 | class changesetprinter(object): |
|
148 | class changesetprinter(object): | |
150 | '''show changeset information when templating not requested.''' |
|
149 | '''show changeset information when templating not requested.''' | |
151 |
|
150 | |||
152 | def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False): |
|
151 | def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False): | |
153 | self.ui = ui |
|
152 | self.ui = ui | |
154 | self.repo = repo |
|
153 | self.repo = repo | |
155 | self.buffered = buffered |
|
154 | self.buffered = buffered | |
156 | self._differ = differ or changesetdiffer() |
|
155 | self._differ = differ or changesetdiffer() | |
157 | self.diffopts = diffopts or {} |
|
156 | self.diffopts = diffopts or {} | |
158 | self.header = {} |
|
157 | self.header = {} | |
159 | self.hunk = {} |
|
158 | self.hunk = {} | |
160 | self.lastheader = None |
|
159 | self.lastheader = None | |
161 | self.footer = None |
|
160 | self.footer = None | |
162 | self._columns = templatekw.getlogcolumns() |
|
161 | self._columns = templatekw.getlogcolumns() | |
163 |
|
162 | |||
164 | def flush(self, ctx): |
|
163 | def flush(self, ctx): | |
165 | rev = ctx.rev() |
|
164 | rev = ctx.rev() | |
166 | if rev in self.header: |
|
165 | if rev in self.header: | |
167 | h = self.header[rev] |
|
166 | h = self.header[rev] | |
168 | if h != self.lastheader: |
|
167 | if h != self.lastheader: | |
169 | self.lastheader = h |
|
168 | self.lastheader = h | |
170 | self.ui.write(h) |
|
169 | self.ui.write(h) | |
171 | del self.header[rev] |
|
170 | del self.header[rev] | |
172 | if rev in self.hunk: |
|
171 | if rev in self.hunk: | |
173 | self.ui.write(self.hunk[rev]) |
|
172 | self.ui.write(self.hunk[rev]) | |
174 | del self.hunk[rev] |
|
173 | del self.hunk[rev] | |
175 |
|
174 | |||
176 | def close(self): |
|
175 | def close(self): | |
177 | if self.footer: |
|
176 | if self.footer: | |
178 | self.ui.write(self.footer) |
|
177 | self.ui.write(self.footer) | |
179 |
|
178 | |||
180 | def show(self, ctx, copies=None, **props): |
|
179 | def show(self, ctx, copies=None, **props): | |
181 | props = pycompat.byteskwargs(props) |
|
180 | props = pycompat.byteskwargs(props) | |
182 | if self.buffered: |
|
181 | if self.buffered: | |
183 | self.ui.pushbuffer(labeled=True) |
|
182 | self.ui.pushbuffer(labeled=True) | |
184 | self._show(ctx, copies, props) |
|
183 | self._show(ctx, copies, props) | |
185 | self.hunk[ctx.rev()] = self.ui.popbuffer() |
|
184 | self.hunk[ctx.rev()] = self.ui.popbuffer() | |
186 | else: |
|
185 | else: | |
187 | self._show(ctx, copies, props) |
|
186 | self._show(ctx, copies, props) | |
188 |
|
187 | |||
189 | def _show(self, ctx, copies, props): |
|
188 | def _show(self, ctx, copies, props): | |
190 | '''show a single changeset or file revision''' |
|
189 | '''show a single changeset or file revision''' | |
191 | changenode = ctx.node() |
|
190 | changenode = ctx.node() | |
192 | rev = ctx.rev() |
|
191 | rev = ctx.rev() | |
193 |
|
192 | |||
194 | if self.ui.quiet: |
|
193 | if self.ui.quiet: | |
195 | self.ui.write("%s\n" % scmutil.formatchangeid(ctx), |
|
194 | self.ui.write("%s\n" % scmutil.formatchangeid(ctx), | |
196 | label='log.node') |
|
195 | label='log.node') | |
197 | return |
|
196 | return | |
198 |
|
197 | |||
199 | columns = self._columns |
|
198 | columns = self._columns | |
200 | self.ui.write(columns['changeset'] % scmutil.formatchangeid(ctx), |
|
199 | self.ui.write(columns['changeset'] % scmutil.formatchangeid(ctx), | |
201 | label=changesetlabels(ctx)) |
|
200 | label=changesetlabels(ctx)) | |
202 |
|
201 | |||
203 | # branches are shown first before any other names due to backwards |
|
202 | # branches are shown first before any other names due to backwards | |
204 | # compatibility |
|
203 | # compatibility | |
205 | branch = ctx.branch() |
|
204 | branch = ctx.branch() | |
206 | # don't show the default branch name |
|
205 | # don't show the default branch name | |
207 | if branch != 'default': |
|
206 | if branch != 'default': | |
208 | self.ui.write(columns['branch'] % branch, label='log.branch') |
|
207 | self.ui.write(columns['branch'] % branch, label='log.branch') | |
209 |
|
208 | |||
210 | for nsname, ns in self.repo.names.iteritems(): |
|
209 | for nsname, ns in self.repo.names.iteritems(): | |
211 | # branches has special logic already handled above, so here we just |
|
210 | # branches has special logic already handled above, so here we just | |
212 | # skip it |
|
211 | # skip it | |
213 | if nsname == 'branches': |
|
212 | if nsname == 'branches': | |
214 | continue |
|
213 | continue | |
215 | # we will use the templatename as the color name since those two |
|
214 | # we will use the templatename as the color name since those two | |
216 | # should be the same |
|
215 | # should be the same | |
217 | for name in ns.names(self.repo, changenode): |
|
216 | for name in ns.names(self.repo, changenode): | |
218 | self.ui.write(ns.logfmt % name, |
|
217 | self.ui.write(ns.logfmt % name, | |
219 | label='log.%s' % ns.colorname) |
|
218 | label='log.%s' % ns.colorname) | |
220 | if self.ui.debugflag: |
|
219 | if self.ui.debugflag: | |
221 | self.ui.write(columns['phase'] % ctx.phasestr(), label='log.phase') |
|
220 | self.ui.write(columns['phase'] % ctx.phasestr(), label='log.phase') | |
222 | for pctx in scmutil.meaningfulparents(self.repo, ctx): |
|
221 | for pctx in scmutil.meaningfulparents(self.repo, ctx): | |
223 | label = 'log.parent changeset.%s' % pctx.phasestr() |
|
222 | label = 'log.parent changeset.%s' % pctx.phasestr() | |
224 | self.ui.write(columns['parent'] % scmutil.formatchangeid(pctx), |
|
223 | self.ui.write(columns['parent'] % scmutil.formatchangeid(pctx), | |
225 | label=label) |
|
224 | label=label) | |
226 |
|
225 | |||
227 | if self.ui.debugflag and rev is not None: |
|
226 | if self.ui.debugflag and rev is not None: | |
228 | mnode = ctx.manifestnode() |
|
227 | mnode = ctx.manifestnode() | |
229 | mrev = self.repo.manifestlog._revlog.rev(mnode) |
|
228 | mrev = self.repo.manifestlog._revlog.rev(mnode) | |
230 | self.ui.write(columns['manifest'] |
|
229 | self.ui.write(columns['manifest'] | |
231 | % scmutil.formatrevnode(self.ui, mrev, mnode), |
|
230 | % scmutil.formatrevnode(self.ui, mrev, mnode), | |
232 | label='ui.debug log.manifest') |
|
231 | label='ui.debug log.manifest') | |
233 | self.ui.write(columns['user'] % ctx.user(), label='log.user') |
|
232 | self.ui.write(columns['user'] % ctx.user(), label='log.user') | |
234 | self.ui.write(columns['date'] % dateutil.datestr(ctx.date()), |
|
233 | self.ui.write(columns['date'] % dateutil.datestr(ctx.date()), | |
235 | label='log.date') |
|
234 | label='log.date') | |
236 |
|
235 | |||
237 | if ctx.isunstable(): |
|
236 | if ctx.isunstable(): | |
238 | instabilities = ctx.instabilities() |
|
237 | instabilities = ctx.instabilities() | |
239 | self.ui.write(columns['instability'] % ', '.join(instabilities), |
|
238 | self.ui.write(columns['instability'] % ', '.join(instabilities), | |
240 | label='log.instability') |
|
239 | label='log.instability') | |
241 |
|
240 | |||
242 | elif ctx.obsolete(): |
|
241 | elif ctx.obsolete(): | |
243 | self._showobsfate(ctx) |
|
242 | self._showobsfate(ctx) | |
244 |
|
243 | |||
245 | self._exthook(ctx) |
|
244 | self._exthook(ctx) | |
246 |
|
245 | |||
247 | if self.ui.debugflag: |
|
246 | if self.ui.debugflag: | |
248 | files = ctx.p1().status(ctx)[:3] |
|
247 | files = ctx.p1().status(ctx)[:3] | |
249 | for key, value in zip(['files', 'files+', 'files-'], files): |
|
248 | for key, value in zip(['files', 'files+', 'files-'], files): | |
250 | if value: |
|
249 | if value: | |
251 | self.ui.write(columns[key] % " ".join(value), |
|
250 | self.ui.write(columns[key] % " ".join(value), | |
252 | label='ui.debug log.files') |
|
251 | label='ui.debug log.files') | |
253 | elif ctx.files() and self.ui.verbose: |
|
252 | elif ctx.files() and self.ui.verbose: | |
254 | self.ui.write(columns['files'] % " ".join(ctx.files()), |
|
253 | self.ui.write(columns['files'] % " ".join(ctx.files()), | |
255 | label='ui.note log.files') |
|
254 | label='ui.note log.files') | |
256 | if copies and self.ui.verbose: |
|
255 | if copies and self.ui.verbose: | |
257 | copies = ['%s (%s)' % c for c in copies] |
|
256 | copies = ['%s (%s)' % c for c in copies] | |
258 | self.ui.write(columns['copies'] % ' '.join(copies), |
|
257 | self.ui.write(columns['copies'] % ' '.join(copies), | |
259 | label='ui.note log.copies') |
|
258 | label='ui.note log.copies') | |
260 |
|
259 | |||
261 | extra = ctx.extra() |
|
260 | extra = ctx.extra() | |
262 | if extra and self.ui.debugflag: |
|
261 | if extra and self.ui.debugflag: | |
263 | for key, value in sorted(extra.items()): |
|
262 | for key, value in sorted(extra.items()): | |
264 | self.ui.write(columns['extra'] % (key, util.escapestr(value)), |
|
263 | self.ui.write(columns['extra'] % (key, util.escapestr(value)), | |
265 | label='ui.debug log.extra') |
|
264 | label='ui.debug log.extra') | |
266 |
|
265 | |||
267 | description = ctx.description().strip() |
|
266 | description = ctx.description().strip() | |
268 | if description: |
|
267 | if description: | |
269 | if self.ui.verbose: |
|
268 | if self.ui.verbose: | |
270 | self.ui.write(_("description:\n"), |
|
269 | self.ui.write(_("description:\n"), | |
271 | label='ui.note log.description') |
|
270 | label='ui.note log.description') | |
272 | self.ui.write(description, |
|
271 | self.ui.write(description, | |
273 | label='ui.note log.description') |
|
272 | label='ui.note log.description') | |
274 | self.ui.write("\n\n") |
|
273 | self.ui.write("\n\n") | |
275 | else: |
|
274 | else: | |
276 | self.ui.write(columns['summary'] % description.splitlines()[0], |
|
275 | self.ui.write(columns['summary'] % description.splitlines()[0], | |
277 | label='log.summary') |
|
276 | label='log.summary') | |
278 | self.ui.write("\n") |
|
277 | self.ui.write("\n") | |
279 |
|
278 | |||
280 | self._showpatch(ctx) |
|
279 | self._showpatch(ctx) | |
281 |
|
280 | |||
282 | def _showobsfate(self, ctx): |
|
281 | def _showobsfate(self, ctx): | |
283 | # TODO: do not depend on templater |
|
282 | # TODO: do not depend on templater | |
284 | tres = formatter.templateresources(self.repo.ui, self.repo) |
|
283 | tres = formatter.templateresources(self.repo.ui, self.repo) | |
285 | t = formatter.maketemplater(self.repo.ui, '{join(obsfate, "\n")}', |
|
284 | t = formatter.maketemplater(self.repo.ui, '{join(obsfate, "\n")}', | |
286 | defaults=templatekw.keywords, |
|
285 | defaults=templatekw.keywords, | |
287 | resources=tres) |
|
286 | resources=tres) | |
288 | obsfate = t.renderdefault({'ctx': ctx, 'revcache': {}}).splitlines() |
|
287 | obsfate = t.renderdefault({'ctx': ctx, 'revcache': {}}).splitlines() | |
289 |
|
288 | |||
290 | if obsfate: |
|
289 | if obsfate: | |
291 | for obsfateline in obsfate: |
|
290 | for obsfateline in obsfate: | |
292 | self.ui.write(self._columns['obsolete'] % obsfateline, |
|
291 | self.ui.write(self._columns['obsolete'] % obsfateline, | |
293 | label='log.obsfate') |
|
292 | label='log.obsfate') | |
294 |
|
293 | |||
295 | def _exthook(self, ctx): |
|
294 | def _exthook(self, ctx): | |
296 | '''empty method used by extension as a hook point |
|
295 | '''empty method used by extension as a hook point | |
297 | ''' |
|
296 | ''' | |
298 |
|
297 | |||
299 | def _showpatch(self, ctx): |
|
298 | def _showpatch(self, ctx): | |
300 | stat = self.diffopts.get('stat') |
|
299 | stat = self.diffopts.get('stat') | |
301 | diff = self.diffopts.get('patch') |
|
300 | diff = self.diffopts.get('patch') | |
302 | diffopts = patch.diffallopts(self.ui, self.diffopts) |
|
301 | diffopts = patch.diffallopts(self.ui, self.diffopts) | |
303 | if stat: |
|
302 | if stat: | |
304 | self._differ.showdiff(self.ui, ctx, diffopts, stat=True) |
|
303 | self._differ.showdiff(self.ui, ctx, diffopts, stat=True) | |
305 | if stat and diff: |
|
304 | if stat and diff: | |
306 | self.ui.write("\n") |
|
305 | self.ui.write("\n") | |
307 | if diff: |
|
306 | if diff: | |
308 | self._differ.showdiff(self.ui, ctx, diffopts, stat=False) |
|
307 | self._differ.showdiff(self.ui, ctx, diffopts, stat=False) | |
309 | if stat or diff: |
|
308 | if stat or diff: | |
310 | self.ui.write("\n") |
|
309 | self.ui.write("\n") | |
311 |
|
310 | |||
312 | class jsonchangeset(changesetprinter): |
|
311 | class jsonchangeset(changesetprinter): | |
313 | '''format changeset information.''' |
|
312 | '''format changeset information.''' | |
314 |
|
313 | |||
315 | def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False): |
|
314 | def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False): | |
316 | changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered) |
|
315 | changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered) | |
317 | self.cache = {} |
|
316 | self.cache = {} | |
318 | self._first = True |
|
317 | self._first = True | |
319 |
|
318 | |||
320 | def close(self): |
|
319 | def close(self): | |
321 | if not self._first: |
|
320 | if not self._first: | |
322 | self.ui.write("\n]\n") |
|
321 | self.ui.write("\n]\n") | |
323 | else: |
|
322 | else: | |
324 | self.ui.write("[]\n") |
|
323 | self.ui.write("[]\n") | |
325 |
|
324 | |||
326 | def _show(self, ctx, copies, props): |
|
325 | def _show(self, ctx, copies, props): | |
327 | '''show a single changeset or file revision''' |
|
326 | '''show a single changeset or file revision''' | |
328 | rev = ctx.rev() |
|
327 | rev = ctx.rev() | |
329 | if rev is None: |
|
328 | if rev is None: | |
330 | jrev = jnode = 'null' |
|
329 | jrev = jnode = 'null' | |
331 | else: |
|
330 | else: | |
332 | jrev = '%d' % rev |
|
331 | jrev = '%d' % rev | |
333 | jnode = '"%s"' % hex(ctx.node()) |
|
332 | jnode = '"%s"' % hex(ctx.node()) | |
334 | j = encoding.jsonescape |
|
333 | j = encoding.jsonescape | |
335 |
|
334 | |||
336 | if self._first: |
|
335 | if self._first: | |
337 | self.ui.write("[\n {") |
|
336 | self.ui.write("[\n {") | |
338 | self._first = False |
|
337 | self._first = False | |
339 | else: |
|
338 | else: | |
340 | self.ui.write(",\n {") |
|
339 | self.ui.write(",\n {") | |
341 |
|
340 | |||
342 | if self.ui.quiet: |
|
341 | if self.ui.quiet: | |
343 | self.ui.write(('\n "rev": %s') % jrev) |
|
342 | self.ui.write(('\n "rev": %s') % jrev) | |
344 | self.ui.write((',\n "node": %s') % jnode) |
|
343 | self.ui.write((',\n "node": %s') % jnode) | |
345 | self.ui.write('\n }') |
|
344 | self.ui.write('\n }') | |
346 | return |
|
345 | return | |
347 |
|
346 | |||
348 | self.ui.write(('\n "rev": %s') % jrev) |
|
347 | self.ui.write(('\n "rev": %s') % jrev) | |
349 | self.ui.write((',\n "node": %s') % jnode) |
|
348 | self.ui.write((',\n "node": %s') % jnode) | |
350 | self.ui.write((',\n "branch": "%s"') % j(ctx.branch())) |
|
349 | self.ui.write((',\n "branch": "%s"') % j(ctx.branch())) | |
351 | self.ui.write((',\n "phase": "%s"') % ctx.phasestr()) |
|
350 | self.ui.write((',\n "phase": "%s"') % ctx.phasestr()) | |
352 | self.ui.write((',\n "user": "%s"') % j(ctx.user())) |
|
351 | self.ui.write((',\n "user": "%s"') % j(ctx.user())) | |
353 | self.ui.write((',\n "date": [%d, %d]') % ctx.date()) |
|
352 | self.ui.write((',\n "date": [%d, %d]') % ctx.date()) | |
354 | self.ui.write((',\n "desc": "%s"') % j(ctx.description())) |
|
353 | self.ui.write((',\n "desc": "%s"') % j(ctx.description())) | |
355 |
|
354 | |||
356 | self.ui.write((',\n "bookmarks": [%s]') % |
|
355 | self.ui.write((',\n "bookmarks": [%s]') % | |
357 | ", ".join('"%s"' % j(b) for b in ctx.bookmarks())) |
|
356 | ", ".join('"%s"' % j(b) for b in ctx.bookmarks())) | |
358 | self.ui.write((',\n "tags": [%s]') % |
|
357 | self.ui.write((',\n "tags": [%s]') % | |
359 | ", ".join('"%s"' % j(t) for t in ctx.tags())) |
|
358 | ", ".join('"%s"' % j(t) for t in ctx.tags())) | |
360 | self.ui.write((',\n "parents": [%s]') % |
|
359 | self.ui.write((',\n "parents": [%s]') % | |
361 | ", ".join('"%s"' % c.hex() for c in ctx.parents())) |
|
360 | ", ".join('"%s"' % c.hex() for c in ctx.parents())) | |
362 |
|
361 | |||
363 | if self.ui.debugflag: |
|
362 | if self.ui.debugflag: | |
364 | if rev is None: |
|
363 | if rev is None: | |
365 | jmanifestnode = 'null' |
|
364 | jmanifestnode = 'null' | |
366 | else: |
|
365 | else: | |
367 | jmanifestnode = '"%s"' % hex(ctx.manifestnode()) |
|
366 | jmanifestnode = '"%s"' % hex(ctx.manifestnode()) | |
368 | self.ui.write((',\n "manifest": %s') % jmanifestnode) |
|
367 | self.ui.write((',\n "manifest": %s') % jmanifestnode) | |
369 |
|
368 | |||
370 | self.ui.write((',\n "extra": {%s}') % |
|
369 | self.ui.write((',\n "extra": {%s}') % | |
371 | ", ".join('"%s": "%s"' % (j(k), j(v)) |
|
370 | ", ".join('"%s": "%s"' % (j(k), j(v)) | |
372 | for k, v in ctx.extra().items())) |
|
371 | for k, v in ctx.extra().items())) | |
373 |
|
372 | |||
374 | files = ctx.p1().status(ctx) |
|
373 | files = ctx.p1().status(ctx) | |
375 | self.ui.write((',\n "modified": [%s]') % |
|
374 | self.ui.write((',\n "modified": [%s]') % | |
376 | ", ".join('"%s"' % j(f) for f in files[0])) |
|
375 | ", ".join('"%s"' % j(f) for f in files[0])) | |
377 | self.ui.write((',\n "added": [%s]') % |
|
376 | self.ui.write((',\n "added": [%s]') % | |
378 | ", ".join('"%s"' % j(f) for f in files[1])) |
|
377 | ", ".join('"%s"' % j(f) for f in files[1])) | |
379 | self.ui.write((',\n "removed": [%s]') % |
|
378 | self.ui.write((',\n "removed": [%s]') % | |
380 | ", ".join('"%s"' % j(f) for f in files[2])) |
|
379 | ", ".join('"%s"' % j(f) for f in files[2])) | |
381 |
|
380 | |||
382 | elif self.ui.verbose: |
|
381 | elif self.ui.verbose: | |
383 | self.ui.write((',\n "files": [%s]') % |
|
382 | self.ui.write((',\n "files": [%s]') % | |
384 | ", ".join('"%s"' % j(f) for f in ctx.files())) |
|
383 | ", ".join('"%s"' % j(f) for f in ctx.files())) | |
385 |
|
384 | |||
386 | if copies: |
|
385 | if copies: | |
387 | self.ui.write((',\n "copies": {%s}') % |
|
386 | self.ui.write((',\n "copies": {%s}') % | |
388 | ", ".join('"%s": "%s"' % (j(k), j(v)) |
|
387 | ", ".join('"%s": "%s"' % (j(k), j(v)) | |
389 | for k, v in copies)) |
|
388 | for k, v in copies)) | |
390 |
|
389 | |||
391 | stat = self.diffopts.get('stat') |
|
390 | stat = self.diffopts.get('stat') | |
392 | diff = self.diffopts.get('patch') |
|
391 | diff = self.diffopts.get('patch') | |
393 | diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True) |
|
392 | diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True) | |
394 | if stat: |
|
393 | if stat: | |
395 | self.ui.pushbuffer() |
|
394 | self.ui.pushbuffer() | |
396 | self._differ.showdiff(self.ui, ctx, diffopts, stat=True) |
|
395 | self._differ.showdiff(self.ui, ctx, diffopts, stat=True) | |
397 | self.ui.write((',\n "diffstat": "%s"') |
|
396 | self.ui.write((',\n "diffstat": "%s"') | |
398 | % j(self.ui.popbuffer())) |
|
397 | % j(self.ui.popbuffer())) | |
399 | if diff: |
|
398 | if diff: | |
400 | self.ui.pushbuffer() |
|
399 | self.ui.pushbuffer() | |
401 | self._differ.showdiff(self.ui, ctx, diffopts, stat=False) |
|
400 | self._differ.showdiff(self.ui, ctx, diffopts, stat=False) | |
402 | self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer())) |
|
401 | self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer())) | |
403 |
|
402 | |||
404 | self.ui.write("\n }") |
|
403 | self.ui.write("\n }") | |
405 |
|
404 | |||
406 | class changesettemplater(changesetprinter): |
|
405 | class changesettemplater(changesetprinter): | |
407 | '''format changeset information. |
|
406 | '''format changeset information. | |
408 |
|
407 | |||
409 | Note: there are a variety of convenience functions to build a |
|
408 | Note: there are a variety of convenience functions to build a | |
410 | changesettemplater for common cases. See functions such as: |
|
409 | changesettemplater for common cases. See functions such as: | |
411 | maketemplater, changesetdisplayer, buildcommittemplate, or other |
|
410 | maketemplater, changesetdisplayer, buildcommittemplate, or other | |
412 | functions that use changesest_templater. |
|
411 | functions that use changesest_templater. | |
413 | ''' |
|
412 | ''' | |
414 |
|
413 | |||
415 | # Arguments before "buffered" used to be positional. Consider not |
|
414 | # Arguments before "buffered" used to be positional. Consider not | |
416 | # adding/removing arguments before "buffered" to not break callers. |
|
415 | # adding/removing arguments before "buffered" to not break callers. | |
417 | def __init__(self, ui, repo, tmplspec, differ=None, diffopts=None, |
|
416 | def __init__(self, ui, repo, tmplspec, differ=None, diffopts=None, | |
418 | buffered=False): |
|
417 | buffered=False): | |
419 | changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered) |
|
418 | changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered) | |
420 | tres = formatter.templateresources(ui, repo) |
|
419 | tres = formatter.templateresources(ui, repo) | |
421 | self.t = formatter.loadtemplater(ui, tmplspec, |
|
420 | self.t = formatter.loadtemplater(ui, tmplspec, | |
422 | defaults=templatekw.keywords, |
|
421 | defaults=templatekw.keywords, | |
423 | resources=tres, |
|
422 | resources=tres, | |
424 | cache=templatekw.defaulttempl) |
|
423 | cache=templatekw.defaulttempl) | |
425 | self._counter = itertools.count() |
|
424 | self._counter = itertools.count() | |
426 | self._getcache = tres['cache'] # shared with _graphnodeformatter() |
|
425 | self._getcache = tres['cache'] # shared with _graphnodeformatter() | |
427 |
|
426 | |||
428 | self._tref = tmplspec.ref |
|
427 | self._tref = tmplspec.ref | |
429 | self._parts = {'header': '', 'footer': '', |
|
428 | self._parts = {'header': '', 'footer': '', | |
430 | tmplspec.ref: tmplspec.ref, |
|
429 | tmplspec.ref: tmplspec.ref, | |
431 | 'docheader': '', 'docfooter': '', |
|
430 | 'docheader': '', 'docfooter': '', | |
432 | 'separator': ''} |
|
431 | 'separator': ''} | |
433 | if tmplspec.mapfile: |
|
432 | if tmplspec.mapfile: | |
434 | # find correct templates for current mode, for backward |
|
433 | # find correct templates for current mode, for backward | |
435 | # compatibility with 'log -v/-q/--debug' using a mapfile |
|
434 | # compatibility with 'log -v/-q/--debug' using a mapfile | |
436 | tmplmodes = [ |
|
435 | tmplmodes = [ | |
437 | (True, ''), |
|
436 | (True, ''), | |
438 | (self.ui.verbose, '_verbose'), |
|
437 | (self.ui.verbose, '_verbose'), | |
439 | (self.ui.quiet, '_quiet'), |
|
438 | (self.ui.quiet, '_quiet'), | |
440 | (self.ui.debugflag, '_debug'), |
|
439 | (self.ui.debugflag, '_debug'), | |
441 | ] |
|
440 | ] | |
442 | for mode, postfix in tmplmodes: |
|
441 | for mode, postfix in tmplmodes: | |
443 | for t in self._parts: |
|
442 | for t in self._parts: | |
444 | cur = t + postfix |
|
443 | cur = t + postfix | |
445 | if mode and cur in self.t: |
|
444 | if mode and cur in self.t: | |
446 | self._parts[t] = cur |
|
445 | self._parts[t] = cur | |
447 | else: |
|
446 | else: | |
448 | partnames = [p for p in self._parts.keys() if p != tmplspec.ref] |
|
447 | partnames = [p for p in self._parts.keys() if p != tmplspec.ref] | |
449 | m = formatter.templatepartsmap(tmplspec, self.t, partnames) |
|
448 | m = formatter.templatepartsmap(tmplspec, self.t, partnames) | |
450 | self._parts.update(m) |
|
449 | self._parts.update(m) | |
451 |
|
450 | |||
452 | if self._parts['docheader']: |
|
451 | if self._parts['docheader']: | |
453 | self.ui.write( |
|
452 | self.ui.write(self.t.render(self._parts['docheader'], {})) | |
454 | templateutil.stringify(self.t(self._parts['docheader']))) |
|
|||
455 |
|
453 | |||
456 | def close(self): |
|
454 | def close(self): | |
457 | if self._parts['docfooter']: |
|
455 | if self._parts['docfooter']: | |
458 | if not self.footer: |
|
456 | if not self.footer: | |
459 | self.footer = "" |
|
457 | self.footer = "" | |
460 | self.footer += templateutil.stringify( |
|
458 | self.footer += self.t.render(self._parts['docfooter'], {}) | |
461 | self.t(self._parts['docfooter'])) |
|
|||
462 | return super(changesettemplater, self).close() |
|
459 | return super(changesettemplater, self).close() | |
463 |
|
460 | |||
464 | def _show(self, ctx, copies, props): |
|
461 | def _show(self, ctx, copies, props): | |
465 | '''show a single changeset or file revision''' |
|
462 | '''show a single changeset or file revision''' | |
466 | props = props.copy() |
|
463 | props = props.copy() | |
467 | props['ctx'] = ctx |
|
464 | props['ctx'] = ctx | |
468 | props['index'] = index = next(self._counter) |
|
465 | props['index'] = index = next(self._counter) | |
469 | props['revcache'] = {'copies': copies} |
|
466 | props['revcache'] = {'copies': copies} | |
470 | props = pycompat.strkwargs(props) |
|
|||
471 |
|
467 | |||
472 | # write separator, which wouldn't work well with the header part below |
|
468 | # write separator, which wouldn't work well with the header part below | |
473 | # since there's inherently a conflict between header (across items) and |
|
469 | # since there's inherently a conflict between header (across items) and | |
474 | # separator (per item) |
|
470 | # separator (per item) | |
475 | if self._parts['separator'] and index > 0: |
|
471 | if self._parts['separator'] and index > 0: | |
476 | self.ui.write( |
|
472 | self.ui.write(self.t.render(self._parts['separator'], {})) | |
477 | templateutil.stringify(self.t(self._parts['separator']))) |
|
|||
478 |
|
473 | |||
479 | # write header |
|
474 | # write header | |
480 | if self._parts['header']: |
|
475 | if self._parts['header']: | |
481 |
h = |
|
476 | h = self.t.render(self._parts['header'], props) | |
482 | if self.buffered: |
|
477 | if self.buffered: | |
483 | self.header[ctx.rev()] = h |
|
478 | self.header[ctx.rev()] = h | |
484 | else: |
|
479 | else: | |
485 | if self.lastheader != h: |
|
480 | if self.lastheader != h: | |
486 | self.lastheader = h |
|
481 | self.lastheader = h | |
487 | self.ui.write(h) |
|
482 | self.ui.write(h) | |
488 |
|
483 | |||
489 | # write changeset metadata, then patch if requested |
|
484 | # write changeset metadata, then patch if requested | |
490 | key = self._parts[self._tref] |
|
485 | key = self._parts[self._tref] | |
491 |
self.ui.write( |
|
486 | self.ui.write(self.t.render(key, props)) | |
492 | self._showpatch(ctx) |
|
487 | self._showpatch(ctx) | |
493 |
|
488 | |||
494 | if self._parts['footer']: |
|
489 | if self._parts['footer']: | |
495 | if not self.footer: |
|
490 | if not self.footer: | |
496 | self.footer = templateutil.stringify( |
|
491 | self.footer = self.t.render(self._parts['footer'], props) | |
497 | self.t(self._parts['footer'], **props)) |
|
|||
498 |
|
492 | |||
499 | def templatespec(tmpl, mapfile): |
|
493 | def templatespec(tmpl, mapfile): | |
500 | if mapfile: |
|
494 | if mapfile: | |
501 | return formatter.templatespec('changeset', tmpl, mapfile) |
|
495 | return formatter.templatespec('changeset', tmpl, mapfile) | |
502 | else: |
|
496 | else: | |
503 | return formatter.templatespec('', tmpl, None) |
|
497 | return formatter.templatespec('', tmpl, None) | |
504 |
|
498 | |||
505 | def _lookuptemplate(ui, tmpl, style): |
|
499 | def _lookuptemplate(ui, tmpl, style): | |
506 | """Find the template matching the given template spec or style |
|
500 | """Find the template matching the given template spec or style | |
507 |
|
501 | |||
508 | See formatter.lookuptemplate() for details. |
|
502 | See formatter.lookuptemplate() for details. | |
509 | """ |
|
503 | """ | |
510 |
|
504 | |||
511 | # ui settings |
|
505 | # ui settings | |
512 | if not tmpl and not style: # template are stronger than style |
|
506 | if not tmpl and not style: # template are stronger than style | |
513 | tmpl = ui.config('ui', 'logtemplate') |
|
507 | tmpl = ui.config('ui', 'logtemplate') | |
514 | if tmpl: |
|
508 | if tmpl: | |
515 | return templatespec(templater.unquotestring(tmpl), None) |
|
509 | return templatespec(templater.unquotestring(tmpl), None) | |
516 | else: |
|
510 | else: | |
517 | style = util.expandpath(ui.config('ui', 'style')) |
|
511 | style = util.expandpath(ui.config('ui', 'style')) | |
518 |
|
512 | |||
519 | if not tmpl and style: |
|
513 | if not tmpl and style: | |
520 | mapfile = style |
|
514 | mapfile = style | |
521 | if not os.path.split(mapfile)[0]: |
|
515 | if not os.path.split(mapfile)[0]: | |
522 | mapname = (templater.templatepath('map-cmdline.' + mapfile) |
|
516 | mapname = (templater.templatepath('map-cmdline.' + mapfile) | |
523 | or templater.templatepath(mapfile)) |
|
517 | or templater.templatepath(mapfile)) | |
524 | if mapname: |
|
518 | if mapname: | |
525 | mapfile = mapname |
|
519 | mapfile = mapname | |
526 | return templatespec(None, mapfile) |
|
520 | return templatespec(None, mapfile) | |
527 |
|
521 | |||
528 | if not tmpl: |
|
522 | if not tmpl: | |
529 | return templatespec(None, None) |
|
523 | return templatespec(None, None) | |
530 |
|
524 | |||
531 | return formatter.lookuptemplate(ui, 'changeset', tmpl) |
|
525 | return formatter.lookuptemplate(ui, 'changeset', tmpl) | |
532 |
|
526 | |||
533 | def maketemplater(ui, repo, tmpl, buffered=False): |
|
527 | def maketemplater(ui, repo, tmpl, buffered=False): | |
534 | """Create a changesettemplater from a literal template 'tmpl' |
|
528 | """Create a changesettemplater from a literal template 'tmpl' | |
535 | byte-string.""" |
|
529 | byte-string.""" | |
536 | spec = templatespec(tmpl, None) |
|
530 | spec = templatespec(tmpl, None) | |
537 | return changesettemplater(ui, repo, spec, buffered=buffered) |
|
531 | return changesettemplater(ui, repo, spec, buffered=buffered) | |
538 |
|
532 | |||
539 | def changesetdisplayer(ui, repo, opts, differ=None, buffered=False): |
|
533 | def changesetdisplayer(ui, repo, opts, differ=None, buffered=False): | |
540 | """show one changeset using template or regular display. |
|
534 | """show one changeset using template or regular display. | |
541 |
|
535 | |||
542 | Display format will be the first non-empty hit of: |
|
536 | Display format will be the first non-empty hit of: | |
543 | 1. option 'template' |
|
537 | 1. option 'template' | |
544 | 2. option 'style' |
|
538 | 2. option 'style' | |
545 | 3. [ui] setting 'logtemplate' |
|
539 | 3. [ui] setting 'logtemplate' | |
546 | 4. [ui] setting 'style' |
|
540 | 4. [ui] setting 'style' | |
547 | If all of these values are either the unset or the empty string, |
|
541 | If all of these values are either the unset or the empty string, | |
548 | regular display via changesetprinter() is done. |
|
542 | regular display via changesetprinter() is done. | |
549 | """ |
|
543 | """ | |
550 | postargs = (differ, opts, buffered) |
|
544 | postargs = (differ, opts, buffered) | |
551 | if opts.get('template') == 'json': |
|
545 | if opts.get('template') == 'json': | |
552 | return jsonchangeset(ui, repo, *postargs) |
|
546 | return jsonchangeset(ui, repo, *postargs) | |
553 |
|
547 | |||
554 | spec = _lookuptemplate(ui, opts.get('template'), opts.get('style')) |
|
548 | spec = _lookuptemplate(ui, opts.get('template'), opts.get('style')) | |
555 |
|
549 | |||
556 | if not spec.ref and not spec.tmpl and not spec.mapfile: |
|
550 | if not spec.ref and not spec.tmpl and not spec.mapfile: | |
557 | return changesetprinter(ui, repo, *postargs) |
|
551 | return changesetprinter(ui, repo, *postargs) | |
558 |
|
552 | |||
559 | return changesettemplater(ui, repo, spec, *postargs) |
|
553 | return changesettemplater(ui, repo, spec, *postargs) | |
560 |
|
554 | |||
561 | def _makematcher(repo, revs, pats, opts): |
|
555 | def _makematcher(repo, revs, pats, opts): | |
562 | """Build matcher and expanded patterns from log options |
|
556 | """Build matcher and expanded patterns from log options | |
563 |
|
557 | |||
564 | If --follow, revs are the revisions to follow from. |
|
558 | If --follow, revs are the revisions to follow from. | |
565 |
|
559 | |||
566 | Returns (match, pats, slowpath) where |
|
560 | Returns (match, pats, slowpath) where | |
567 | - match: a matcher built from the given pats and -I/-X opts |
|
561 | - match: a matcher built from the given pats and -I/-X opts | |
568 | - pats: patterns used (globs are expanded on Windows) |
|
562 | - pats: patterns used (globs are expanded on Windows) | |
569 | - slowpath: True if patterns aren't as simple as scanning filelogs |
|
563 | - slowpath: True if patterns aren't as simple as scanning filelogs | |
570 | """ |
|
564 | """ | |
571 | # pats/include/exclude are passed to match.match() directly in |
|
565 | # pats/include/exclude are passed to match.match() directly in | |
572 | # _matchfiles() revset but walkchangerevs() builds its matcher with |
|
566 | # _matchfiles() revset but walkchangerevs() builds its matcher with | |
573 | # scmutil.match(). The difference is input pats are globbed on |
|
567 | # scmutil.match(). The difference is input pats are globbed on | |
574 | # platforms without shell expansion (windows). |
|
568 | # platforms without shell expansion (windows). | |
575 | wctx = repo[None] |
|
569 | wctx = repo[None] | |
576 | match, pats = scmutil.matchandpats(wctx, pats, opts) |
|
570 | match, pats = scmutil.matchandpats(wctx, pats, opts) | |
577 | slowpath = match.anypats() or (not match.always() and opts.get('removed')) |
|
571 | slowpath = match.anypats() or (not match.always() and opts.get('removed')) | |
578 | if not slowpath: |
|
572 | if not slowpath: | |
579 | follow = opts.get('follow') or opts.get('follow_first') |
|
573 | follow = opts.get('follow') or opts.get('follow_first') | |
580 | startctxs = [] |
|
574 | startctxs = [] | |
581 | if follow and opts.get('rev'): |
|
575 | if follow and opts.get('rev'): | |
582 | startctxs = [repo[r] for r in revs] |
|
576 | startctxs = [repo[r] for r in revs] | |
583 | for f in match.files(): |
|
577 | for f in match.files(): | |
584 | if follow and startctxs: |
|
578 | if follow and startctxs: | |
585 | # No idea if the path was a directory at that revision, so |
|
579 | # No idea if the path was a directory at that revision, so | |
586 | # take the slow path. |
|
580 | # take the slow path. | |
587 | if any(f not in c for c in startctxs): |
|
581 | if any(f not in c for c in startctxs): | |
588 | slowpath = True |
|
582 | slowpath = True | |
589 | continue |
|
583 | continue | |
590 | elif follow and f not in wctx: |
|
584 | elif follow and f not in wctx: | |
591 | # If the file exists, it may be a directory, so let it |
|
585 | # If the file exists, it may be a directory, so let it | |
592 | # take the slow path. |
|
586 | # take the slow path. | |
593 | if os.path.exists(repo.wjoin(f)): |
|
587 | if os.path.exists(repo.wjoin(f)): | |
594 | slowpath = True |
|
588 | slowpath = True | |
595 | continue |
|
589 | continue | |
596 | else: |
|
590 | else: | |
597 | raise error.Abort(_('cannot follow file not in parent ' |
|
591 | raise error.Abort(_('cannot follow file not in parent ' | |
598 | 'revision: "%s"') % f) |
|
592 | 'revision: "%s"') % f) | |
599 | filelog = repo.file(f) |
|
593 | filelog = repo.file(f) | |
600 | if not filelog: |
|
594 | if not filelog: | |
601 | # A zero count may be a directory or deleted file, so |
|
595 | # A zero count may be a directory or deleted file, so | |
602 | # try to find matching entries on the slow path. |
|
596 | # try to find matching entries on the slow path. | |
603 | if follow: |
|
597 | if follow: | |
604 | raise error.Abort( |
|
598 | raise error.Abort( | |
605 | _('cannot follow nonexistent file: "%s"') % f) |
|
599 | _('cannot follow nonexistent file: "%s"') % f) | |
606 | slowpath = True |
|
600 | slowpath = True | |
607 |
|
601 | |||
608 | # We decided to fall back to the slowpath because at least one |
|
602 | # We decided to fall back to the slowpath because at least one | |
609 | # of the paths was not a file. Check to see if at least one of them |
|
603 | # of the paths was not a file. Check to see if at least one of them | |
610 | # existed in history - in that case, we'll continue down the |
|
604 | # existed in history - in that case, we'll continue down the | |
611 | # slowpath; otherwise, we can turn off the slowpath |
|
605 | # slowpath; otherwise, we can turn off the slowpath | |
612 | if slowpath: |
|
606 | if slowpath: | |
613 | for path in match.files(): |
|
607 | for path in match.files(): | |
614 | if path == '.' or path in repo.store: |
|
608 | if path == '.' or path in repo.store: | |
615 | break |
|
609 | break | |
616 | else: |
|
610 | else: | |
617 | slowpath = False |
|
611 | slowpath = False | |
618 |
|
612 | |||
619 | return match, pats, slowpath |
|
613 | return match, pats, slowpath | |
620 |
|
614 | |||
621 | def _fileancestors(repo, revs, match, followfirst): |
|
615 | def _fileancestors(repo, revs, match, followfirst): | |
622 | fctxs = [] |
|
616 | fctxs = [] | |
623 | for r in revs: |
|
617 | for r in revs: | |
624 | ctx = repo[r] |
|
618 | ctx = repo[r] | |
625 | fctxs.extend(ctx[f].introfilectx() for f in ctx.walk(match)) |
|
619 | fctxs.extend(ctx[f].introfilectx() for f in ctx.walk(match)) | |
626 |
|
620 | |||
627 | # When displaying a revision with --patch --follow FILE, we have |
|
621 | # When displaying a revision with --patch --follow FILE, we have | |
628 | # to know which file of the revision must be diffed. With |
|
622 | # to know which file of the revision must be diffed. With | |
629 | # --follow, we want the names of the ancestors of FILE in the |
|
623 | # --follow, we want the names of the ancestors of FILE in the | |
630 | # revision, stored in "fcache". "fcache" is populated as a side effect |
|
624 | # revision, stored in "fcache". "fcache" is populated as a side effect | |
631 | # of the graph traversal. |
|
625 | # of the graph traversal. | |
632 | fcache = {} |
|
626 | fcache = {} | |
633 | def filematcher(ctx): |
|
627 | def filematcher(ctx): | |
634 | return scmutil.matchfiles(repo, fcache.get(ctx.rev(), [])) |
|
628 | return scmutil.matchfiles(repo, fcache.get(ctx.rev(), [])) | |
635 |
|
629 | |||
636 | def revgen(): |
|
630 | def revgen(): | |
637 | for rev, cs in dagop.filectxancestors(fctxs, followfirst=followfirst): |
|
631 | for rev, cs in dagop.filectxancestors(fctxs, followfirst=followfirst): | |
638 | fcache[rev] = [c.path() for c in cs] |
|
632 | fcache[rev] = [c.path() for c in cs] | |
639 | yield rev |
|
633 | yield rev | |
640 | return smartset.generatorset(revgen(), iterasc=False), filematcher |
|
634 | return smartset.generatorset(revgen(), iterasc=False), filematcher | |
641 |
|
635 | |||
642 | def _makenofollowfilematcher(repo, pats, opts): |
|
636 | def _makenofollowfilematcher(repo, pats, opts): | |
643 | '''hook for extensions to override the filematcher for non-follow cases''' |
|
637 | '''hook for extensions to override the filematcher for non-follow cases''' | |
644 | return None |
|
638 | return None | |
645 |
|
639 | |||
646 | _opt2logrevset = { |
|
640 | _opt2logrevset = { | |
647 | 'no_merges': ('not merge()', None), |
|
641 | 'no_merges': ('not merge()', None), | |
648 | 'only_merges': ('merge()', None), |
|
642 | 'only_merges': ('merge()', None), | |
649 | '_matchfiles': (None, '_matchfiles(%ps)'), |
|
643 | '_matchfiles': (None, '_matchfiles(%ps)'), | |
650 | 'date': ('date(%s)', None), |
|
644 | 'date': ('date(%s)', None), | |
651 | 'branch': ('branch(%s)', '%lr'), |
|
645 | 'branch': ('branch(%s)', '%lr'), | |
652 | '_patslog': ('filelog(%s)', '%lr'), |
|
646 | '_patslog': ('filelog(%s)', '%lr'), | |
653 | 'keyword': ('keyword(%s)', '%lr'), |
|
647 | 'keyword': ('keyword(%s)', '%lr'), | |
654 | 'prune': ('ancestors(%s)', 'not %lr'), |
|
648 | 'prune': ('ancestors(%s)', 'not %lr'), | |
655 | 'user': ('user(%s)', '%lr'), |
|
649 | 'user': ('user(%s)', '%lr'), | |
656 | } |
|
650 | } | |
657 |
|
651 | |||
658 | def _makerevset(repo, match, pats, slowpath, opts): |
|
652 | def _makerevset(repo, match, pats, slowpath, opts): | |
659 | """Return a revset string built from log options and file patterns""" |
|
653 | """Return a revset string built from log options and file patterns""" | |
660 | opts = dict(opts) |
|
654 | opts = dict(opts) | |
661 | # follow or not follow? |
|
655 | # follow or not follow? | |
662 | follow = opts.get('follow') or opts.get('follow_first') |
|
656 | follow = opts.get('follow') or opts.get('follow_first') | |
663 |
|
657 | |||
664 | # branch and only_branch are really aliases and must be handled at |
|
658 | # branch and only_branch are really aliases and must be handled at | |
665 | # the same time |
|
659 | # the same time | |
666 | opts['branch'] = opts.get('branch', []) + opts.get('only_branch', []) |
|
660 | opts['branch'] = opts.get('branch', []) + opts.get('only_branch', []) | |
667 | opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']] |
|
661 | opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']] | |
668 |
|
662 | |||
669 | if slowpath: |
|
663 | if slowpath: | |
670 | # See walkchangerevs() slow path. |
|
664 | # See walkchangerevs() slow path. | |
671 | # |
|
665 | # | |
672 | # pats/include/exclude cannot be represented as separate |
|
666 | # pats/include/exclude cannot be represented as separate | |
673 | # revset expressions as their filtering logic applies at file |
|
667 | # revset expressions as their filtering logic applies at file | |
674 | # level. For instance "-I a -X b" matches a revision touching |
|
668 | # level. For instance "-I a -X b" matches a revision touching | |
675 | # "a" and "b" while "file(a) and not file(b)" does |
|
669 | # "a" and "b" while "file(a) and not file(b)" does | |
676 | # not. Besides, filesets are evaluated against the working |
|
670 | # not. Besides, filesets are evaluated against the working | |
677 | # directory. |
|
671 | # directory. | |
678 | matchargs = ['r:', 'd:relpath'] |
|
672 | matchargs = ['r:', 'd:relpath'] | |
679 | for p in pats: |
|
673 | for p in pats: | |
680 | matchargs.append('p:' + p) |
|
674 | matchargs.append('p:' + p) | |
681 | for p in opts.get('include', []): |
|
675 | for p in opts.get('include', []): | |
682 | matchargs.append('i:' + p) |
|
676 | matchargs.append('i:' + p) | |
683 | for p in opts.get('exclude', []): |
|
677 | for p in opts.get('exclude', []): | |
684 | matchargs.append('x:' + p) |
|
678 | matchargs.append('x:' + p) | |
685 | opts['_matchfiles'] = matchargs |
|
679 | opts['_matchfiles'] = matchargs | |
686 | elif not follow: |
|
680 | elif not follow: | |
687 | opts['_patslog'] = list(pats) |
|
681 | opts['_patslog'] = list(pats) | |
688 |
|
682 | |||
689 | expr = [] |
|
683 | expr = [] | |
690 | for op, val in sorted(opts.iteritems()): |
|
684 | for op, val in sorted(opts.iteritems()): | |
691 | if not val: |
|
685 | if not val: | |
692 | continue |
|
686 | continue | |
693 | if op not in _opt2logrevset: |
|
687 | if op not in _opt2logrevset: | |
694 | continue |
|
688 | continue | |
695 | revop, listop = _opt2logrevset[op] |
|
689 | revop, listop = _opt2logrevset[op] | |
696 | if revop and '%' not in revop: |
|
690 | if revop and '%' not in revop: | |
697 | expr.append(revop) |
|
691 | expr.append(revop) | |
698 | elif not listop: |
|
692 | elif not listop: | |
699 | expr.append(revsetlang.formatspec(revop, val)) |
|
693 | expr.append(revsetlang.formatspec(revop, val)) | |
700 | else: |
|
694 | else: | |
701 | if revop: |
|
695 | if revop: | |
702 | val = [revsetlang.formatspec(revop, v) for v in val] |
|
696 | val = [revsetlang.formatspec(revop, v) for v in val] | |
703 | expr.append(revsetlang.formatspec(listop, val)) |
|
697 | expr.append(revsetlang.formatspec(listop, val)) | |
704 |
|
698 | |||
705 | if expr: |
|
699 | if expr: | |
706 | expr = '(' + ' and '.join(expr) + ')' |
|
700 | expr = '(' + ' and '.join(expr) + ')' | |
707 | else: |
|
701 | else: | |
708 | expr = None |
|
702 | expr = None | |
709 | return expr |
|
703 | return expr | |
710 |
|
704 | |||
711 | def _initialrevs(repo, opts): |
|
705 | def _initialrevs(repo, opts): | |
712 | """Return the initial set of revisions to be filtered or followed""" |
|
706 | """Return the initial set of revisions to be filtered or followed""" | |
713 | follow = opts.get('follow') or opts.get('follow_first') |
|
707 | follow = opts.get('follow') or opts.get('follow_first') | |
714 | if opts.get('rev'): |
|
708 | if opts.get('rev'): | |
715 | revs = scmutil.revrange(repo, opts['rev']) |
|
709 | revs = scmutil.revrange(repo, opts['rev']) | |
716 | elif follow and repo.dirstate.p1() == nullid: |
|
710 | elif follow and repo.dirstate.p1() == nullid: | |
717 | revs = smartset.baseset() |
|
711 | revs = smartset.baseset() | |
718 | elif follow: |
|
712 | elif follow: | |
719 | revs = repo.revs('.') |
|
713 | revs = repo.revs('.') | |
720 | else: |
|
714 | else: | |
721 | revs = smartset.spanset(repo) |
|
715 | revs = smartset.spanset(repo) | |
722 | revs.reverse() |
|
716 | revs.reverse() | |
723 | return revs |
|
717 | return revs | |
724 |
|
718 | |||
725 | def getrevs(repo, pats, opts): |
|
719 | def getrevs(repo, pats, opts): | |
726 | """Return (revs, differ) where revs is a smartset |
|
720 | """Return (revs, differ) where revs is a smartset | |
727 |
|
721 | |||
728 | differ is a changesetdiffer with pre-configured file matcher. |
|
722 | differ is a changesetdiffer with pre-configured file matcher. | |
729 | """ |
|
723 | """ | |
730 | follow = opts.get('follow') or opts.get('follow_first') |
|
724 | follow = opts.get('follow') or opts.get('follow_first') | |
731 | followfirst = opts.get('follow_first') |
|
725 | followfirst = opts.get('follow_first') | |
732 | limit = getlimit(opts) |
|
726 | limit = getlimit(opts) | |
733 | revs = _initialrevs(repo, opts) |
|
727 | revs = _initialrevs(repo, opts) | |
734 | if not revs: |
|
728 | if not revs: | |
735 | return smartset.baseset(), None |
|
729 | return smartset.baseset(), None | |
736 | match, pats, slowpath = _makematcher(repo, revs, pats, opts) |
|
730 | match, pats, slowpath = _makematcher(repo, revs, pats, opts) | |
737 | filematcher = None |
|
731 | filematcher = None | |
738 | if follow: |
|
732 | if follow: | |
739 | if slowpath or match.always(): |
|
733 | if slowpath or match.always(): | |
740 | revs = dagop.revancestors(repo, revs, followfirst=followfirst) |
|
734 | revs = dagop.revancestors(repo, revs, followfirst=followfirst) | |
741 | else: |
|
735 | else: | |
742 | revs, filematcher = _fileancestors(repo, revs, match, followfirst) |
|
736 | revs, filematcher = _fileancestors(repo, revs, match, followfirst) | |
743 | revs.reverse() |
|
737 | revs.reverse() | |
744 | if filematcher is None: |
|
738 | if filematcher is None: | |
745 | filematcher = _makenofollowfilematcher(repo, pats, opts) |
|
739 | filematcher = _makenofollowfilematcher(repo, pats, opts) | |
746 | if filematcher is None: |
|
740 | if filematcher is None: | |
747 | def filematcher(ctx): |
|
741 | def filematcher(ctx): | |
748 | return match |
|
742 | return match | |
749 |
|
743 | |||
750 | expr = _makerevset(repo, match, pats, slowpath, opts) |
|
744 | expr = _makerevset(repo, match, pats, slowpath, opts) | |
751 | if opts.get('graph') and opts.get('rev'): |
|
745 | if opts.get('graph') and opts.get('rev'): | |
752 | # User-specified revs might be unsorted, but don't sort before |
|
746 | # User-specified revs might be unsorted, but don't sort before | |
753 | # _makerevset because it might depend on the order of revs |
|
747 | # _makerevset because it might depend on the order of revs | |
754 | if not (revs.isdescending() or revs.istopo()): |
|
748 | if not (revs.isdescending() or revs.istopo()): | |
755 | revs.sort(reverse=True) |
|
749 | revs.sort(reverse=True) | |
756 | if expr: |
|
750 | if expr: | |
757 | matcher = revset.match(None, expr) |
|
751 | matcher = revset.match(None, expr) | |
758 | revs = matcher(repo, revs) |
|
752 | revs = matcher(repo, revs) | |
759 | if limit is not None: |
|
753 | if limit is not None: | |
760 | revs = revs.slice(0, limit) |
|
754 | revs = revs.slice(0, limit) | |
761 |
|
755 | |||
762 | differ = changesetdiffer() |
|
756 | differ = changesetdiffer() | |
763 | differ._makefilematcher = filematcher |
|
757 | differ._makefilematcher = filematcher | |
764 | return revs, differ |
|
758 | return revs, differ | |
765 |
|
759 | |||
766 | def _parselinerangeopt(repo, opts): |
|
760 | def _parselinerangeopt(repo, opts): | |
767 | """Parse --line-range log option and return a list of tuples (filename, |
|
761 | """Parse --line-range log option and return a list of tuples (filename, | |
768 | (fromline, toline)). |
|
762 | (fromline, toline)). | |
769 | """ |
|
763 | """ | |
770 | linerangebyfname = [] |
|
764 | linerangebyfname = [] | |
771 | for pat in opts.get('line_range', []): |
|
765 | for pat in opts.get('line_range', []): | |
772 | try: |
|
766 | try: | |
773 | pat, linerange = pat.rsplit(',', 1) |
|
767 | pat, linerange = pat.rsplit(',', 1) | |
774 | except ValueError: |
|
768 | except ValueError: | |
775 | raise error.Abort(_('malformatted line-range pattern %s') % pat) |
|
769 | raise error.Abort(_('malformatted line-range pattern %s') % pat) | |
776 | try: |
|
770 | try: | |
777 | fromline, toline = map(int, linerange.split(':')) |
|
771 | fromline, toline = map(int, linerange.split(':')) | |
778 | except ValueError: |
|
772 | except ValueError: | |
779 | raise error.Abort(_("invalid line range for %s") % pat) |
|
773 | raise error.Abort(_("invalid line range for %s") % pat) | |
780 | msg = _("line range pattern '%s' must match exactly one file") % pat |
|
774 | msg = _("line range pattern '%s' must match exactly one file") % pat | |
781 | fname = scmutil.parsefollowlinespattern(repo, None, pat, msg) |
|
775 | fname = scmutil.parsefollowlinespattern(repo, None, pat, msg) | |
782 | linerangebyfname.append( |
|
776 | linerangebyfname.append( | |
783 | (fname, util.processlinerange(fromline, toline))) |
|
777 | (fname, util.processlinerange(fromline, toline))) | |
784 | return linerangebyfname |
|
778 | return linerangebyfname | |
785 |
|
779 | |||
786 | def getlinerangerevs(repo, userrevs, opts): |
|
780 | def getlinerangerevs(repo, userrevs, opts): | |
787 | """Return (revs, differ). |
|
781 | """Return (revs, differ). | |
788 |
|
782 | |||
789 | "revs" are revisions obtained by processing "line-range" log options and |
|
783 | "revs" are revisions obtained by processing "line-range" log options and | |
790 | walking block ancestors of each specified file/line-range. |
|
784 | walking block ancestors of each specified file/line-range. | |
791 |
|
785 | |||
792 | "differ" is a changesetdiffer with pre-configured file matcher and hunks |
|
786 | "differ" is a changesetdiffer with pre-configured file matcher and hunks | |
793 | filter. |
|
787 | filter. | |
794 | """ |
|
788 | """ | |
795 | wctx = repo[None] |
|
789 | wctx = repo[None] | |
796 |
|
790 | |||
797 | # Two-levels map of "rev -> file ctx -> [line range]". |
|
791 | # Two-levels map of "rev -> file ctx -> [line range]". | |
798 | linerangesbyrev = {} |
|
792 | linerangesbyrev = {} | |
799 | for fname, (fromline, toline) in _parselinerangeopt(repo, opts): |
|
793 | for fname, (fromline, toline) in _parselinerangeopt(repo, opts): | |
800 | if fname not in wctx: |
|
794 | if fname not in wctx: | |
801 | raise error.Abort(_('cannot follow file not in parent ' |
|
795 | raise error.Abort(_('cannot follow file not in parent ' | |
802 | 'revision: "%s"') % fname) |
|
796 | 'revision: "%s"') % fname) | |
803 | fctx = wctx.filectx(fname) |
|
797 | fctx = wctx.filectx(fname) | |
804 | for fctx, linerange in dagop.blockancestors(fctx, fromline, toline): |
|
798 | for fctx, linerange in dagop.blockancestors(fctx, fromline, toline): | |
805 | rev = fctx.introrev() |
|
799 | rev = fctx.introrev() | |
806 | if rev not in userrevs: |
|
800 | if rev not in userrevs: | |
807 | continue |
|
801 | continue | |
808 | linerangesbyrev.setdefault( |
|
802 | linerangesbyrev.setdefault( | |
809 | rev, {}).setdefault( |
|
803 | rev, {}).setdefault( | |
810 | fctx.path(), []).append(linerange) |
|
804 | fctx.path(), []).append(linerange) | |
811 |
|
805 | |||
812 | def nofilterhunksfn(fctx, hunks): |
|
806 | def nofilterhunksfn(fctx, hunks): | |
813 | return hunks |
|
807 | return hunks | |
814 |
|
808 | |||
815 | def hunksfilter(ctx): |
|
809 | def hunksfilter(ctx): | |
816 | fctxlineranges = linerangesbyrev.get(ctx.rev()) |
|
810 | fctxlineranges = linerangesbyrev.get(ctx.rev()) | |
817 | if fctxlineranges is None: |
|
811 | if fctxlineranges is None: | |
818 | return nofilterhunksfn |
|
812 | return nofilterhunksfn | |
819 |
|
813 | |||
820 | def filterfn(fctx, hunks): |
|
814 | def filterfn(fctx, hunks): | |
821 | lineranges = fctxlineranges.get(fctx.path()) |
|
815 | lineranges = fctxlineranges.get(fctx.path()) | |
822 | if lineranges is not None: |
|
816 | if lineranges is not None: | |
823 | for hr, lines in hunks: |
|
817 | for hr, lines in hunks: | |
824 | if hr is None: # binary |
|
818 | if hr is None: # binary | |
825 | yield hr, lines |
|
819 | yield hr, lines | |
826 | continue |
|
820 | continue | |
827 | if any(mdiff.hunkinrange(hr[2:], lr) |
|
821 | if any(mdiff.hunkinrange(hr[2:], lr) | |
828 | for lr in lineranges): |
|
822 | for lr in lineranges): | |
829 | yield hr, lines |
|
823 | yield hr, lines | |
830 | else: |
|
824 | else: | |
831 | for hunk in hunks: |
|
825 | for hunk in hunks: | |
832 | yield hunk |
|
826 | yield hunk | |
833 |
|
827 | |||
834 | return filterfn |
|
828 | return filterfn | |
835 |
|
829 | |||
836 | def filematcher(ctx): |
|
830 | def filematcher(ctx): | |
837 | files = list(linerangesbyrev.get(ctx.rev(), [])) |
|
831 | files = list(linerangesbyrev.get(ctx.rev(), [])) | |
838 | return scmutil.matchfiles(repo, files) |
|
832 | return scmutil.matchfiles(repo, files) | |
839 |
|
833 | |||
840 | revs = sorted(linerangesbyrev, reverse=True) |
|
834 | revs = sorted(linerangesbyrev, reverse=True) | |
841 |
|
835 | |||
842 | differ = changesetdiffer() |
|
836 | differ = changesetdiffer() | |
843 | differ._makefilematcher = filematcher |
|
837 | differ._makefilematcher = filematcher | |
844 | differ._makehunksfilter = hunksfilter |
|
838 | differ._makehunksfilter = hunksfilter | |
845 | return revs, differ |
|
839 | return revs, differ | |
846 |
|
840 | |||
847 | def _graphnodeformatter(ui, displayer): |
|
841 | def _graphnodeformatter(ui, displayer): | |
848 | spec = ui.config('ui', 'graphnodetemplate') |
|
842 | spec = ui.config('ui', 'graphnodetemplate') | |
849 | if not spec: |
|
843 | if not spec: | |
850 | return templatekw.getgraphnode # fast path for "{graphnode}" |
|
844 | return templatekw.getgraphnode # fast path for "{graphnode}" | |
851 |
|
845 | |||
852 | spec = templater.unquotestring(spec) |
|
846 | spec = templater.unquotestring(spec) | |
853 | tres = formatter.templateresources(ui) |
|
847 | tres = formatter.templateresources(ui) | |
854 | if isinstance(displayer, changesettemplater): |
|
848 | if isinstance(displayer, changesettemplater): | |
855 | # reuse cache of slow templates |
|
849 | # reuse cache of slow templates | |
856 | tres['cache'] = displayer._getcache |
|
850 | tres['cache'] = displayer._getcache | |
857 | templ = formatter.maketemplater(ui, spec, defaults=templatekw.keywords, |
|
851 | templ = formatter.maketemplater(ui, spec, defaults=templatekw.keywords, | |
858 | resources=tres) |
|
852 | resources=tres) | |
859 | def formatnode(repo, ctx): |
|
853 | def formatnode(repo, ctx): | |
860 | props = {'ctx': ctx, 'repo': repo, 'revcache': {}} |
|
854 | props = {'ctx': ctx, 'repo': repo, 'revcache': {}} | |
861 | return templ.renderdefault(props) |
|
855 | return templ.renderdefault(props) | |
862 | return formatnode |
|
856 | return formatnode | |
863 |
|
857 | |||
864 | def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None, props=None): |
|
858 | def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None, props=None): | |
865 | props = props or {} |
|
859 | props = props or {} | |
866 | formatnode = _graphnodeformatter(ui, displayer) |
|
860 | formatnode = _graphnodeformatter(ui, displayer) | |
867 | state = graphmod.asciistate() |
|
861 | state = graphmod.asciistate() | |
868 | styles = state['styles'] |
|
862 | styles = state['styles'] | |
869 |
|
863 | |||
870 | # only set graph styling if HGPLAIN is not set. |
|
864 | # only set graph styling if HGPLAIN is not set. | |
871 | if ui.plain('graph'): |
|
865 | if ui.plain('graph'): | |
872 | # set all edge styles to |, the default pre-3.8 behaviour |
|
866 | # set all edge styles to |, the default pre-3.8 behaviour | |
873 | styles.update(dict.fromkeys(styles, '|')) |
|
867 | styles.update(dict.fromkeys(styles, '|')) | |
874 | else: |
|
868 | else: | |
875 | edgetypes = { |
|
869 | edgetypes = { | |
876 | 'parent': graphmod.PARENT, |
|
870 | 'parent': graphmod.PARENT, | |
877 | 'grandparent': graphmod.GRANDPARENT, |
|
871 | 'grandparent': graphmod.GRANDPARENT, | |
878 | 'missing': graphmod.MISSINGPARENT |
|
872 | 'missing': graphmod.MISSINGPARENT | |
879 | } |
|
873 | } | |
880 | for name, key in edgetypes.items(): |
|
874 | for name, key in edgetypes.items(): | |
881 | # experimental config: experimental.graphstyle.* |
|
875 | # experimental config: experimental.graphstyle.* | |
882 | styles[key] = ui.config('experimental', 'graphstyle.%s' % name, |
|
876 | styles[key] = ui.config('experimental', 'graphstyle.%s' % name, | |
883 | styles[key]) |
|
877 | styles[key]) | |
884 | if not styles[key]: |
|
878 | if not styles[key]: | |
885 | styles[key] = None |
|
879 | styles[key] = None | |
886 |
|
880 | |||
887 | # experimental config: experimental.graphshorten |
|
881 | # experimental config: experimental.graphshorten | |
888 | state['graphshorten'] = ui.configbool('experimental', 'graphshorten') |
|
882 | state['graphshorten'] = ui.configbool('experimental', 'graphshorten') | |
889 |
|
883 | |||
890 | for rev, type, ctx, parents in dag: |
|
884 | for rev, type, ctx, parents in dag: | |
891 | char = formatnode(repo, ctx) |
|
885 | char = formatnode(repo, ctx) | |
892 | copies = None |
|
886 | copies = None | |
893 | if getrenamed and ctx.rev(): |
|
887 | if getrenamed and ctx.rev(): | |
894 | copies = [] |
|
888 | copies = [] | |
895 | for fn in ctx.files(): |
|
889 | for fn in ctx.files(): | |
896 | rename = getrenamed(fn, ctx.rev()) |
|
890 | rename = getrenamed(fn, ctx.rev()) | |
897 | if rename: |
|
891 | if rename: | |
898 | copies.append((fn, rename[0])) |
|
892 | copies.append((fn, rename[0])) | |
899 | edges = edgefn(type, char, state, rev, parents) |
|
893 | edges = edgefn(type, char, state, rev, parents) | |
900 | firstedge = next(edges) |
|
894 | firstedge = next(edges) | |
901 | width = firstedge[2] |
|
895 | width = firstedge[2] | |
902 | displayer.show(ctx, copies=copies, |
|
896 | displayer.show(ctx, copies=copies, | |
903 | graphwidth=width, **pycompat.strkwargs(props)) |
|
897 | graphwidth=width, **pycompat.strkwargs(props)) | |
904 | lines = displayer.hunk.pop(rev).split('\n') |
|
898 | lines = displayer.hunk.pop(rev).split('\n') | |
905 | if not lines[-1]: |
|
899 | if not lines[-1]: | |
906 | del lines[-1] |
|
900 | del lines[-1] | |
907 | displayer.flush(ctx) |
|
901 | displayer.flush(ctx) | |
908 | for type, char, width, coldata in itertools.chain([firstedge], edges): |
|
902 | for type, char, width, coldata in itertools.chain([firstedge], edges): | |
909 | graphmod.ascii(ui, state, type, char, lines, coldata) |
|
903 | graphmod.ascii(ui, state, type, char, lines, coldata) | |
910 | lines = [] |
|
904 | lines = [] | |
911 | displayer.close() |
|
905 | displayer.close() | |
912 |
|
906 | |||
913 | def displaygraphrevs(ui, repo, revs, displayer, getrenamed): |
|
907 | def displaygraphrevs(ui, repo, revs, displayer, getrenamed): | |
914 | revdag = graphmod.dagwalker(repo, revs) |
|
908 | revdag = graphmod.dagwalker(repo, revs) | |
915 | displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed) |
|
909 | displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed) | |
916 |
|
910 | |||
917 | def displayrevs(ui, repo, revs, displayer, getrenamed): |
|
911 | def displayrevs(ui, repo, revs, displayer, getrenamed): | |
918 | for rev in revs: |
|
912 | for rev in revs: | |
919 | ctx = repo[rev] |
|
913 | ctx = repo[rev] | |
920 | copies = None |
|
914 | copies = None | |
921 | if getrenamed is not None and rev: |
|
915 | if getrenamed is not None and rev: | |
922 | copies = [] |
|
916 | copies = [] | |
923 | for fn in ctx.files(): |
|
917 | for fn in ctx.files(): | |
924 | rename = getrenamed(fn, rev) |
|
918 | rename = getrenamed(fn, rev) | |
925 | if rename: |
|
919 | if rename: | |
926 | copies.append((fn, rename[0])) |
|
920 | copies.append((fn, rename[0])) | |
927 | displayer.show(ctx, copies=copies) |
|
921 | displayer.show(ctx, copies=copies) | |
928 | displayer.flush(ctx) |
|
922 | displayer.flush(ctx) | |
929 | displayer.close() |
|
923 | displayer.close() | |
930 |
|
924 | |||
931 | def checkunsupportedgraphflags(pats, opts): |
|
925 | def checkunsupportedgraphflags(pats, opts): | |
932 | for op in ["newest_first"]: |
|
926 | for op in ["newest_first"]: | |
933 | if op in opts and opts[op]: |
|
927 | if op in opts and opts[op]: | |
934 | raise error.Abort(_("-G/--graph option is incompatible with --%s") |
|
928 | raise error.Abort(_("-G/--graph option is incompatible with --%s") | |
935 | % op.replace("_", "-")) |
|
929 | % op.replace("_", "-")) | |
936 |
|
930 | |||
937 | def graphrevs(repo, nodes, opts): |
|
931 | def graphrevs(repo, nodes, opts): | |
938 | limit = getlimit(opts) |
|
932 | limit = getlimit(opts) | |
939 | nodes.reverse() |
|
933 | nodes.reverse() | |
940 | if limit is not None: |
|
934 | if limit is not None: | |
941 | nodes = nodes[:limit] |
|
935 | nodes = nodes[:limit] | |
942 | return graphmod.nodes(repo, nodes) |
|
936 | return graphmod.nodes(repo, nodes) |
@@ -1,797 +1,801 b'' | |||||
1 | # templater.py - template expansion for output |
|
1 | # templater.py - template expansion for output | |
2 | # |
|
2 | # | |
3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import, print_function |
|
8 | from __future__ import absolute_import, print_function | |
9 |
|
9 | |||
10 | import os |
|
10 | import os | |
11 |
|
11 | |||
12 | from .i18n import _ |
|
12 | from .i18n import _ | |
13 | from . import ( |
|
13 | from . import ( | |
14 | config, |
|
14 | config, | |
15 | encoding, |
|
15 | encoding, | |
16 | error, |
|
16 | error, | |
17 | parser, |
|
17 | parser, | |
18 | pycompat, |
|
18 | pycompat, | |
19 | templatefilters, |
|
19 | templatefilters, | |
20 | templatefuncs, |
|
20 | templatefuncs, | |
21 | templateutil, |
|
21 | templateutil, | |
22 | util, |
|
22 | util, | |
23 | ) |
|
23 | ) | |
24 |
|
24 | |||
25 | # template parsing |
|
25 | # template parsing | |
26 |
|
26 | |||
27 | elements = { |
|
27 | elements = { | |
28 | # token-type: binding-strength, primary, prefix, infix, suffix |
|
28 | # token-type: binding-strength, primary, prefix, infix, suffix | |
29 | "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None), |
|
29 | "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None), | |
30 | ".": (18, None, None, (".", 18), None), |
|
30 | ".": (18, None, None, (".", 18), None), | |
31 | "%": (15, None, None, ("%", 15), None), |
|
31 | "%": (15, None, None, ("%", 15), None), | |
32 | "|": (15, None, None, ("|", 15), None), |
|
32 | "|": (15, None, None, ("|", 15), None), | |
33 | "*": (5, None, None, ("*", 5), None), |
|
33 | "*": (5, None, None, ("*", 5), None), | |
34 | "/": (5, None, None, ("/", 5), None), |
|
34 | "/": (5, None, None, ("/", 5), None), | |
35 | "+": (4, None, None, ("+", 4), None), |
|
35 | "+": (4, None, None, ("+", 4), None), | |
36 | "-": (4, None, ("negate", 19), ("-", 4), None), |
|
36 | "-": (4, None, ("negate", 19), ("-", 4), None), | |
37 | "=": (3, None, None, ("keyvalue", 3), None), |
|
37 | "=": (3, None, None, ("keyvalue", 3), None), | |
38 | ",": (2, None, None, ("list", 2), None), |
|
38 | ",": (2, None, None, ("list", 2), None), | |
39 | ")": (0, None, None, None, None), |
|
39 | ")": (0, None, None, None, None), | |
40 | "integer": (0, "integer", None, None, None), |
|
40 | "integer": (0, "integer", None, None, None), | |
41 | "symbol": (0, "symbol", None, None, None), |
|
41 | "symbol": (0, "symbol", None, None, None), | |
42 | "string": (0, "string", None, None, None), |
|
42 | "string": (0, "string", None, None, None), | |
43 | "template": (0, "template", None, None, None), |
|
43 | "template": (0, "template", None, None, None), | |
44 | "end": (0, None, None, None, None), |
|
44 | "end": (0, None, None, None, None), | |
45 | } |
|
45 | } | |
46 |
|
46 | |||
47 | def tokenize(program, start, end, term=None): |
|
47 | def tokenize(program, start, end, term=None): | |
48 | """Parse a template expression into a stream of tokens, which must end |
|
48 | """Parse a template expression into a stream of tokens, which must end | |
49 | with term if specified""" |
|
49 | with term if specified""" | |
50 | pos = start |
|
50 | pos = start | |
51 | program = pycompat.bytestr(program) |
|
51 | program = pycompat.bytestr(program) | |
52 | while pos < end: |
|
52 | while pos < end: | |
53 | c = program[pos] |
|
53 | c = program[pos] | |
54 | if c.isspace(): # skip inter-token whitespace |
|
54 | if c.isspace(): # skip inter-token whitespace | |
55 | pass |
|
55 | pass | |
56 | elif c in "(=,).%|+-*/": # handle simple operators |
|
56 | elif c in "(=,).%|+-*/": # handle simple operators | |
57 | yield (c, None, pos) |
|
57 | yield (c, None, pos) | |
58 | elif c in '"\'': # handle quoted templates |
|
58 | elif c in '"\'': # handle quoted templates | |
59 | s = pos + 1 |
|
59 | s = pos + 1 | |
60 | data, pos = _parsetemplate(program, s, end, c) |
|
60 | data, pos = _parsetemplate(program, s, end, c) | |
61 | yield ('template', data, s) |
|
61 | yield ('template', data, s) | |
62 | pos -= 1 |
|
62 | pos -= 1 | |
63 | elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'): |
|
63 | elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'): | |
64 | # handle quoted strings |
|
64 | # handle quoted strings | |
65 | c = program[pos + 1] |
|
65 | c = program[pos + 1] | |
66 | s = pos = pos + 2 |
|
66 | s = pos = pos + 2 | |
67 | while pos < end: # find closing quote |
|
67 | while pos < end: # find closing quote | |
68 | d = program[pos] |
|
68 | d = program[pos] | |
69 | if d == '\\': # skip over escaped characters |
|
69 | if d == '\\': # skip over escaped characters | |
70 | pos += 2 |
|
70 | pos += 2 | |
71 | continue |
|
71 | continue | |
72 | if d == c: |
|
72 | if d == c: | |
73 | yield ('string', program[s:pos], s) |
|
73 | yield ('string', program[s:pos], s) | |
74 | break |
|
74 | break | |
75 | pos += 1 |
|
75 | pos += 1 | |
76 | else: |
|
76 | else: | |
77 | raise error.ParseError(_("unterminated string"), s) |
|
77 | raise error.ParseError(_("unterminated string"), s) | |
78 | elif c.isdigit(): |
|
78 | elif c.isdigit(): | |
79 | s = pos |
|
79 | s = pos | |
80 | while pos < end: |
|
80 | while pos < end: | |
81 | d = program[pos] |
|
81 | d = program[pos] | |
82 | if not d.isdigit(): |
|
82 | if not d.isdigit(): | |
83 | break |
|
83 | break | |
84 | pos += 1 |
|
84 | pos += 1 | |
85 | yield ('integer', program[s:pos], s) |
|
85 | yield ('integer', program[s:pos], s) | |
86 | pos -= 1 |
|
86 | pos -= 1 | |
87 | elif (c == '\\' and program[pos:pos + 2] in (br"\'", br'\"') |
|
87 | elif (c == '\\' and program[pos:pos + 2] in (br"\'", br'\"') | |
88 | or c == 'r' and program[pos:pos + 3] in (br"r\'", br'r\"')): |
|
88 | or c == 'r' and program[pos:pos + 3] in (br"r\'", br'r\"')): | |
89 | # handle escaped quoted strings for compatibility with 2.9.2-3.4, |
|
89 | # handle escaped quoted strings for compatibility with 2.9.2-3.4, | |
90 | # where some of nested templates were preprocessed as strings and |
|
90 | # where some of nested templates were preprocessed as strings and | |
91 | # then compiled. therefore, \"...\" was allowed. (issue4733) |
|
91 | # then compiled. therefore, \"...\" was allowed. (issue4733) | |
92 | # |
|
92 | # | |
93 | # processing flow of _evalifliteral() at 5ab28a2e9962: |
|
93 | # processing flow of _evalifliteral() at 5ab28a2e9962: | |
94 | # outer template string -> stringify() -> compiletemplate() |
|
94 | # outer template string -> stringify() -> compiletemplate() | |
95 | # ------------------------ ------------ ------------------ |
|
95 | # ------------------------ ------------ ------------------ | |
96 | # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}] |
|
96 | # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}] | |
97 | # ~~~~~~~~ |
|
97 | # ~~~~~~~~ | |
98 | # escaped quoted string |
|
98 | # escaped quoted string | |
99 | if c == 'r': |
|
99 | if c == 'r': | |
100 | pos += 1 |
|
100 | pos += 1 | |
101 | token = 'string' |
|
101 | token = 'string' | |
102 | else: |
|
102 | else: | |
103 | token = 'template' |
|
103 | token = 'template' | |
104 | quote = program[pos:pos + 2] |
|
104 | quote = program[pos:pos + 2] | |
105 | s = pos = pos + 2 |
|
105 | s = pos = pos + 2 | |
106 | while pos < end: # find closing escaped quote |
|
106 | while pos < end: # find closing escaped quote | |
107 | if program.startswith('\\\\\\', pos, end): |
|
107 | if program.startswith('\\\\\\', pos, end): | |
108 | pos += 4 # skip over double escaped characters |
|
108 | pos += 4 # skip over double escaped characters | |
109 | continue |
|
109 | continue | |
110 | if program.startswith(quote, pos, end): |
|
110 | if program.startswith(quote, pos, end): | |
111 | # interpret as if it were a part of an outer string |
|
111 | # interpret as if it were a part of an outer string | |
112 | data = parser.unescapestr(program[s:pos]) |
|
112 | data = parser.unescapestr(program[s:pos]) | |
113 | if token == 'template': |
|
113 | if token == 'template': | |
114 | data = _parsetemplate(data, 0, len(data))[0] |
|
114 | data = _parsetemplate(data, 0, len(data))[0] | |
115 | yield (token, data, s) |
|
115 | yield (token, data, s) | |
116 | pos += 1 |
|
116 | pos += 1 | |
117 | break |
|
117 | break | |
118 | pos += 1 |
|
118 | pos += 1 | |
119 | else: |
|
119 | else: | |
120 | raise error.ParseError(_("unterminated string"), s) |
|
120 | raise error.ParseError(_("unterminated string"), s) | |
121 | elif c.isalnum() or c in '_': |
|
121 | elif c.isalnum() or c in '_': | |
122 | s = pos |
|
122 | s = pos | |
123 | pos += 1 |
|
123 | pos += 1 | |
124 | while pos < end: # find end of symbol |
|
124 | while pos < end: # find end of symbol | |
125 | d = program[pos] |
|
125 | d = program[pos] | |
126 | if not (d.isalnum() or d == "_"): |
|
126 | if not (d.isalnum() or d == "_"): | |
127 | break |
|
127 | break | |
128 | pos += 1 |
|
128 | pos += 1 | |
129 | sym = program[s:pos] |
|
129 | sym = program[s:pos] | |
130 | yield ('symbol', sym, s) |
|
130 | yield ('symbol', sym, s) | |
131 | pos -= 1 |
|
131 | pos -= 1 | |
132 | elif c == term: |
|
132 | elif c == term: | |
133 | yield ('end', None, pos) |
|
133 | yield ('end', None, pos) | |
134 | return |
|
134 | return | |
135 | else: |
|
135 | else: | |
136 | raise error.ParseError(_("syntax error"), pos) |
|
136 | raise error.ParseError(_("syntax error"), pos) | |
137 | pos += 1 |
|
137 | pos += 1 | |
138 | if term: |
|
138 | if term: | |
139 | raise error.ParseError(_("unterminated template expansion"), start) |
|
139 | raise error.ParseError(_("unterminated template expansion"), start) | |
140 | yield ('end', None, pos) |
|
140 | yield ('end', None, pos) | |
141 |
|
141 | |||
142 | def _parsetemplate(tmpl, start, stop, quote=''): |
|
142 | def _parsetemplate(tmpl, start, stop, quote=''): | |
143 | r""" |
|
143 | r""" | |
144 | >>> _parsetemplate(b'foo{bar}"baz', 0, 12) |
|
144 | >>> _parsetemplate(b'foo{bar}"baz', 0, 12) | |
145 | ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12) |
|
145 | ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12) | |
146 | >>> _parsetemplate(b'foo{bar}"baz', 0, 12, quote=b'"') |
|
146 | >>> _parsetemplate(b'foo{bar}"baz', 0, 12, quote=b'"') | |
147 | ([('string', 'foo'), ('symbol', 'bar')], 9) |
|
147 | ([('string', 'foo'), ('symbol', 'bar')], 9) | |
148 | >>> _parsetemplate(b'foo"{bar}', 0, 9, quote=b'"') |
|
148 | >>> _parsetemplate(b'foo"{bar}', 0, 9, quote=b'"') | |
149 | ([('string', 'foo')], 4) |
|
149 | ([('string', 'foo')], 4) | |
150 | >>> _parsetemplate(br'foo\"bar"baz', 0, 12, quote=b'"') |
|
150 | >>> _parsetemplate(br'foo\"bar"baz', 0, 12, quote=b'"') | |
151 | ([('string', 'foo"'), ('string', 'bar')], 9) |
|
151 | ([('string', 'foo"'), ('string', 'bar')], 9) | |
152 | >>> _parsetemplate(br'foo\\"bar', 0, 10, quote=b'"') |
|
152 | >>> _parsetemplate(br'foo\\"bar', 0, 10, quote=b'"') | |
153 | ([('string', 'foo\\')], 6) |
|
153 | ([('string', 'foo\\')], 6) | |
154 | """ |
|
154 | """ | |
155 | parsed = [] |
|
155 | parsed = [] | |
156 | for typ, val, pos in _scantemplate(tmpl, start, stop, quote): |
|
156 | for typ, val, pos in _scantemplate(tmpl, start, stop, quote): | |
157 | if typ == 'string': |
|
157 | if typ == 'string': | |
158 | parsed.append((typ, val)) |
|
158 | parsed.append((typ, val)) | |
159 | elif typ == 'template': |
|
159 | elif typ == 'template': | |
160 | parsed.append(val) |
|
160 | parsed.append(val) | |
161 | elif typ == 'end': |
|
161 | elif typ == 'end': | |
162 | return parsed, pos |
|
162 | return parsed, pos | |
163 | else: |
|
163 | else: | |
164 | raise error.ProgrammingError('unexpected type: %s' % typ) |
|
164 | raise error.ProgrammingError('unexpected type: %s' % typ) | |
165 | raise error.ProgrammingError('unterminated scanning of template') |
|
165 | raise error.ProgrammingError('unterminated scanning of template') | |
166 |
|
166 | |||
167 | def scantemplate(tmpl, raw=False): |
|
167 | def scantemplate(tmpl, raw=False): | |
168 | r"""Scan (type, start, end) positions of outermost elements in template |
|
168 | r"""Scan (type, start, end) positions of outermost elements in template | |
169 |
|
169 | |||
170 | If raw=True, a backslash is not taken as an escape character just like |
|
170 | If raw=True, a backslash is not taken as an escape character just like | |
171 | r'' string in Python. Note that this is different from r'' literal in |
|
171 | r'' string in Python. Note that this is different from r'' literal in | |
172 | template in that no template fragment can appear in r'', e.g. r'{foo}' |
|
172 | template in that no template fragment can appear in r'', e.g. r'{foo}' | |
173 | is a literal '{foo}', but ('{foo}', raw=True) is a template expression |
|
173 | is a literal '{foo}', but ('{foo}', raw=True) is a template expression | |
174 | 'foo'. |
|
174 | 'foo'. | |
175 |
|
175 | |||
176 | >>> list(scantemplate(b'foo{bar}"baz')) |
|
176 | >>> list(scantemplate(b'foo{bar}"baz')) | |
177 | [('string', 0, 3), ('template', 3, 8), ('string', 8, 12)] |
|
177 | [('string', 0, 3), ('template', 3, 8), ('string', 8, 12)] | |
178 | >>> list(scantemplate(b'outer{"inner"}outer')) |
|
178 | >>> list(scantemplate(b'outer{"inner"}outer')) | |
179 | [('string', 0, 5), ('template', 5, 14), ('string', 14, 19)] |
|
179 | [('string', 0, 5), ('template', 5, 14), ('string', 14, 19)] | |
180 | >>> list(scantemplate(b'foo\\{escaped}')) |
|
180 | >>> list(scantemplate(b'foo\\{escaped}')) | |
181 | [('string', 0, 5), ('string', 5, 13)] |
|
181 | [('string', 0, 5), ('string', 5, 13)] | |
182 | >>> list(scantemplate(b'foo\\{escaped}', raw=True)) |
|
182 | >>> list(scantemplate(b'foo\\{escaped}', raw=True)) | |
183 | [('string', 0, 4), ('template', 4, 13)] |
|
183 | [('string', 0, 4), ('template', 4, 13)] | |
184 | """ |
|
184 | """ | |
185 | last = None |
|
185 | last = None | |
186 | for typ, val, pos in _scantemplate(tmpl, 0, len(tmpl), raw=raw): |
|
186 | for typ, val, pos in _scantemplate(tmpl, 0, len(tmpl), raw=raw): | |
187 | if last: |
|
187 | if last: | |
188 | yield last + (pos,) |
|
188 | yield last + (pos,) | |
189 | if typ == 'end': |
|
189 | if typ == 'end': | |
190 | return |
|
190 | return | |
191 | else: |
|
191 | else: | |
192 | last = (typ, pos) |
|
192 | last = (typ, pos) | |
193 | raise error.ProgrammingError('unterminated scanning of template') |
|
193 | raise error.ProgrammingError('unterminated scanning of template') | |
194 |
|
194 | |||
195 | def _scantemplate(tmpl, start, stop, quote='', raw=False): |
|
195 | def _scantemplate(tmpl, start, stop, quote='', raw=False): | |
196 | """Parse template string into chunks of strings and template expressions""" |
|
196 | """Parse template string into chunks of strings and template expressions""" | |
197 | sepchars = '{' + quote |
|
197 | sepchars = '{' + quote | |
198 | unescape = [parser.unescapestr, pycompat.identity][raw] |
|
198 | unescape = [parser.unescapestr, pycompat.identity][raw] | |
199 | pos = start |
|
199 | pos = start | |
200 | p = parser.parser(elements) |
|
200 | p = parser.parser(elements) | |
201 | try: |
|
201 | try: | |
202 | while pos < stop: |
|
202 | while pos < stop: | |
203 | n = min((tmpl.find(c, pos, stop) for c in sepchars), |
|
203 | n = min((tmpl.find(c, pos, stop) for c in sepchars), | |
204 | key=lambda n: (n < 0, n)) |
|
204 | key=lambda n: (n < 0, n)) | |
205 | if n < 0: |
|
205 | if n < 0: | |
206 | yield ('string', unescape(tmpl[pos:stop]), pos) |
|
206 | yield ('string', unescape(tmpl[pos:stop]), pos) | |
207 | pos = stop |
|
207 | pos = stop | |
208 | break |
|
208 | break | |
209 | c = tmpl[n:n + 1] |
|
209 | c = tmpl[n:n + 1] | |
210 | bs = 0 # count leading backslashes |
|
210 | bs = 0 # count leading backslashes | |
211 | if not raw: |
|
211 | if not raw: | |
212 | bs = (n - pos) - len(tmpl[pos:n].rstrip('\\')) |
|
212 | bs = (n - pos) - len(tmpl[pos:n].rstrip('\\')) | |
213 | if bs % 2 == 1: |
|
213 | if bs % 2 == 1: | |
214 | # escaped (e.g. '\{', '\\\{', but not '\\{') |
|
214 | # escaped (e.g. '\{', '\\\{', but not '\\{') | |
215 | yield ('string', unescape(tmpl[pos:n - 1]) + c, pos) |
|
215 | yield ('string', unescape(tmpl[pos:n - 1]) + c, pos) | |
216 | pos = n + 1 |
|
216 | pos = n + 1 | |
217 | continue |
|
217 | continue | |
218 | if n > pos: |
|
218 | if n > pos: | |
219 | yield ('string', unescape(tmpl[pos:n]), pos) |
|
219 | yield ('string', unescape(tmpl[pos:n]), pos) | |
220 | if c == quote: |
|
220 | if c == quote: | |
221 | yield ('end', None, n + 1) |
|
221 | yield ('end', None, n + 1) | |
222 | return |
|
222 | return | |
223 |
|
223 | |||
224 | parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}')) |
|
224 | parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}')) | |
225 | if not tmpl.startswith('}', pos): |
|
225 | if not tmpl.startswith('}', pos): | |
226 | raise error.ParseError(_("invalid token"), pos) |
|
226 | raise error.ParseError(_("invalid token"), pos) | |
227 | yield ('template', parseres, n) |
|
227 | yield ('template', parseres, n) | |
228 | pos += 1 |
|
228 | pos += 1 | |
229 |
|
229 | |||
230 | if quote: |
|
230 | if quote: | |
231 | raise error.ParseError(_("unterminated string"), start) |
|
231 | raise error.ParseError(_("unterminated string"), start) | |
232 | except error.ParseError as inst: |
|
232 | except error.ParseError as inst: | |
233 | if len(inst.args) > 1: # has location |
|
233 | if len(inst.args) > 1: # has location | |
234 | loc = inst.args[1] |
|
234 | loc = inst.args[1] | |
235 | # Offset the caret location by the number of newlines before the |
|
235 | # Offset the caret location by the number of newlines before the | |
236 | # location of the error, since we will replace one-char newlines |
|
236 | # location of the error, since we will replace one-char newlines | |
237 | # with the two-char literal r'\n'. |
|
237 | # with the two-char literal r'\n'. | |
238 | offset = tmpl[:loc].count('\n') |
|
238 | offset = tmpl[:loc].count('\n') | |
239 | tmpl = tmpl.replace('\n', br'\n') |
|
239 | tmpl = tmpl.replace('\n', br'\n') | |
240 | # We want the caret to point to the place in the template that |
|
240 | # We want the caret to point to the place in the template that | |
241 | # failed to parse, but in a hint we get a open paren at the |
|
241 | # failed to parse, but in a hint we get a open paren at the | |
242 | # start. Therefore, we print "loc + 1" spaces (instead of "loc") |
|
242 | # start. Therefore, we print "loc + 1" spaces (instead of "loc") | |
243 | # to line up the caret with the location of the error. |
|
243 | # to line up the caret with the location of the error. | |
244 | inst.hint = (tmpl + '\n' |
|
244 | inst.hint = (tmpl + '\n' | |
245 | + ' ' * (loc + 1 + offset) + '^ ' + _('here')) |
|
245 | + ' ' * (loc + 1 + offset) + '^ ' + _('here')) | |
246 | raise |
|
246 | raise | |
247 | yield ('end', None, pos) |
|
247 | yield ('end', None, pos) | |
248 |
|
248 | |||
249 | def _unnesttemplatelist(tree): |
|
249 | def _unnesttemplatelist(tree): | |
250 | """Expand list of templates to node tuple |
|
250 | """Expand list of templates to node tuple | |
251 |
|
251 | |||
252 | >>> def f(tree): |
|
252 | >>> def f(tree): | |
253 | ... print(pycompat.sysstr(prettyformat(_unnesttemplatelist(tree)))) |
|
253 | ... print(pycompat.sysstr(prettyformat(_unnesttemplatelist(tree)))) | |
254 | >>> f((b'template', [])) |
|
254 | >>> f((b'template', [])) | |
255 | (string '') |
|
255 | (string '') | |
256 | >>> f((b'template', [(b'string', b'foo')])) |
|
256 | >>> f((b'template', [(b'string', b'foo')])) | |
257 | (string 'foo') |
|
257 | (string 'foo') | |
258 | >>> f((b'template', [(b'string', b'foo'), (b'symbol', b'rev')])) |
|
258 | >>> f((b'template', [(b'string', b'foo'), (b'symbol', b'rev')])) | |
259 | (template |
|
259 | (template | |
260 | (string 'foo') |
|
260 | (string 'foo') | |
261 | (symbol 'rev')) |
|
261 | (symbol 'rev')) | |
262 | >>> f((b'template', [(b'symbol', b'rev')])) # template(rev) -> str |
|
262 | >>> f((b'template', [(b'symbol', b'rev')])) # template(rev) -> str | |
263 | (template |
|
263 | (template | |
264 | (symbol 'rev')) |
|
264 | (symbol 'rev')) | |
265 | >>> f((b'template', [(b'template', [(b'string', b'foo')])])) |
|
265 | >>> f((b'template', [(b'template', [(b'string', b'foo')])])) | |
266 | (string 'foo') |
|
266 | (string 'foo') | |
267 | """ |
|
267 | """ | |
268 | if not isinstance(tree, tuple): |
|
268 | if not isinstance(tree, tuple): | |
269 | return tree |
|
269 | return tree | |
270 | op = tree[0] |
|
270 | op = tree[0] | |
271 | if op != 'template': |
|
271 | if op != 'template': | |
272 | return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:]) |
|
272 | return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:]) | |
273 |
|
273 | |||
274 | assert len(tree) == 2 |
|
274 | assert len(tree) == 2 | |
275 | xs = tuple(_unnesttemplatelist(x) for x in tree[1]) |
|
275 | xs = tuple(_unnesttemplatelist(x) for x in tree[1]) | |
276 | if not xs: |
|
276 | if not xs: | |
277 | return ('string', '') # empty template "" |
|
277 | return ('string', '') # empty template "" | |
278 | elif len(xs) == 1 and xs[0][0] == 'string': |
|
278 | elif len(xs) == 1 and xs[0][0] == 'string': | |
279 | return xs[0] # fast path for string with no template fragment "x" |
|
279 | return xs[0] # fast path for string with no template fragment "x" | |
280 | else: |
|
280 | else: | |
281 | return (op,) + xs |
|
281 | return (op,) + xs | |
282 |
|
282 | |||
283 | def parse(tmpl): |
|
283 | def parse(tmpl): | |
284 | """Parse template string into tree""" |
|
284 | """Parse template string into tree""" | |
285 | parsed, pos = _parsetemplate(tmpl, 0, len(tmpl)) |
|
285 | parsed, pos = _parsetemplate(tmpl, 0, len(tmpl)) | |
286 | assert pos == len(tmpl), 'unquoted template should be consumed' |
|
286 | assert pos == len(tmpl), 'unquoted template should be consumed' | |
287 | return _unnesttemplatelist(('template', parsed)) |
|
287 | return _unnesttemplatelist(('template', parsed)) | |
288 |
|
288 | |||
289 | def _parseexpr(expr): |
|
289 | def _parseexpr(expr): | |
290 | """Parse a template expression into tree |
|
290 | """Parse a template expression into tree | |
291 |
|
291 | |||
292 | >>> _parseexpr(b'"foo"') |
|
292 | >>> _parseexpr(b'"foo"') | |
293 | ('string', 'foo') |
|
293 | ('string', 'foo') | |
294 | >>> _parseexpr(b'foo(bar)') |
|
294 | >>> _parseexpr(b'foo(bar)') | |
295 | ('func', ('symbol', 'foo'), ('symbol', 'bar')) |
|
295 | ('func', ('symbol', 'foo'), ('symbol', 'bar')) | |
296 | >>> _parseexpr(b'foo(') |
|
296 | >>> _parseexpr(b'foo(') | |
297 | Traceback (most recent call last): |
|
297 | Traceback (most recent call last): | |
298 | ... |
|
298 | ... | |
299 | ParseError: ('not a prefix: end', 4) |
|
299 | ParseError: ('not a prefix: end', 4) | |
300 | >>> _parseexpr(b'"foo" "bar"') |
|
300 | >>> _parseexpr(b'"foo" "bar"') | |
301 | Traceback (most recent call last): |
|
301 | Traceback (most recent call last): | |
302 | ... |
|
302 | ... | |
303 | ParseError: ('invalid token', 7) |
|
303 | ParseError: ('invalid token', 7) | |
304 | """ |
|
304 | """ | |
305 | p = parser.parser(elements) |
|
305 | p = parser.parser(elements) | |
306 | tree, pos = p.parse(tokenize(expr, 0, len(expr))) |
|
306 | tree, pos = p.parse(tokenize(expr, 0, len(expr))) | |
307 | if pos != len(expr): |
|
307 | if pos != len(expr): | |
308 | raise error.ParseError(_('invalid token'), pos) |
|
308 | raise error.ParseError(_('invalid token'), pos) | |
309 | return _unnesttemplatelist(tree) |
|
309 | return _unnesttemplatelist(tree) | |
310 |
|
310 | |||
311 | def prettyformat(tree): |
|
311 | def prettyformat(tree): | |
312 | return parser.prettyformat(tree, ('integer', 'string', 'symbol')) |
|
312 | return parser.prettyformat(tree, ('integer', 'string', 'symbol')) | |
313 |
|
313 | |||
314 | def compileexp(exp, context, curmethods): |
|
314 | def compileexp(exp, context, curmethods): | |
315 | """Compile parsed template tree to (func, data) pair""" |
|
315 | """Compile parsed template tree to (func, data) pair""" | |
316 | if not exp: |
|
316 | if not exp: | |
317 | raise error.ParseError(_("missing argument")) |
|
317 | raise error.ParseError(_("missing argument")) | |
318 | t = exp[0] |
|
318 | t = exp[0] | |
319 | if t in curmethods: |
|
319 | if t in curmethods: | |
320 | return curmethods[t](exp, context) |
|
320 | return curmethods[t](exp, context) | |
321 | raise error.ParseError(_("unknown method '%s'") % t) |
|
321 | raise error.ParseError(_("unknown method '%s'") % t) | |
322 |
|
322 | |||
323 | # template evaluation |
|
323 | # template evaluation | |
324 |
|
324 | |||
325 | def getsymbol(exp): |
|
325 | def getsymbol(exp): | |
326 | if exp[0] == 'symbol': |
|
326 | if exp[0] == 'symbol': | |
327 | return exp[1] |
|
327 | return exp[1] | |
328 | raise error.ParseError(_("expected a symbol, got '%s'") % exp[0]) |
|
328 | raise error.ParseError(_("expected a symbol, got '%s'") % exp[0]) | |
329 |
|
329 | |||
330 | def getlist(x): |
|
330 | def getlist(x): | |
331 | if not x: |
|
331 | if not x: | |
332 | return [] |
|
332 | return [] | |
333 | if x[0] == 'list': |
|
333 | if x[0] == 'list': | |
334 | return getlist(x[1]) + [x[2]] |
|
334 | return getlist(x[1]) + [x[2]] | |
335 | return [x] |
|
335 | return [x] | |
336 |
|
336 | |||
337 | def gettemplate(exp, context): |
|
337 | def gettemplate(exp, context): | |
338 | """Compile given template tree or load named template from map file; |
|
338 | """Compile given template tree or load named template from map file; | |
339 | returns (func, data) pair""" |
|
339 | returns (func, data) pair""" | |
340 | if exp[0] in ('template', 'string'): |
|
340 | if exp[0] in ('template', 'string'): | |
341 | return compileexp(exp, context, methods) |
|
341 | return compileexp(exp, context, methods) | |
342 | if exp[0] == 'symbol': |
|
342 | if exp[0] == 'symbol': | |
343 | # unlike runsymbol(), here 'symbol' is always taken as template name |
|
343 | # unlike runsymbol(), here 'symbol' is always taken as template name | |
344 | # even if it exists in mapping. this allows us to override mapping |
|
344 | # even if it exists in mapping. this allows us to override mapping | |
345 | # by web templates, e.g. 'changelogtag' is redefined in map file. |
|
345 | # by web templates, e.g. 'changelogtag' is redefined in map file. | |
346 | return context._load(exp[1]) |
|
346 | return context._load(exp[1]) | |
347 | raise error.ParseError(_("expected template specifier")) |
|
347 | raise error.ParseError(_("expected template specifier")) | |
348 |
|
348 | |||
349 | def _runrecursivesymbol(context, mapping, key): |
|
349 | def _runrecursivesymbol(context, mapping, key): | |
350 | raise error.Abort(_("recursive reference '%s' in template") % key) |
|
350 | raise error.Abort(_("recursive reference '%s' in template") % key) | |
351 |
|
351 | |||
352 | def buildtemplate(exp, context): |
|
352 | def buildtemplate(exp, context): | |
353 | ctmpl = [compileexp(e, context, methods) for e in exp[1:]] |
|
353 | ctmpl = [compileexp(e, context, methods) for e in exp[1:]] | |
354 | return (templateutil.runtemplate, ctmpl) |
|
354 | return (templateutil.runtemplate, ctmpl) | |
355 |
|
355 | |||
356 | def buildfilter(exp, context): |
|
356 | def buildfilter(exp, context): | |
357 | n = getsymbol(exp[2]) |
|
357 | n = getsymbol(exp[2]) | |
358 | if n in context._filters: |
|
358 | if n in context._filters: | |
359 | filt = context._filters[n] |
|
359 | filt = context._filters[n] | |
360 | arg = compileexp(exp[1], context, methods) |
|
360 | arg = compileexp(exp[1], context, methods) | |
361 | return (templateutil.runfilter, (arg, filt)) |
|
361 | return (templateutil.runfilter, (arg, filt)) | |
362 | if n in context._funcs: |
|
362 | if n in context._funcs: | |
363 | f = context._funcs[n] |
|
363 | f = context._funcs[n] | |
364 | args = _buildfuncargs(exp[1], context, methods, n, f._argspec) |
|
364 | args = _buildfuncargs(exp[1], context, methods, n, f._argspec) | |
365 | return (f, args) |
|
365 | return (f, args) | |
366 | raise error.ParseError(_("unknown function '%s'") % n) |
|
366 | raise error.ParseError(_("unknown function '%s'") % n) | |
367 |
|
367 | |||
368 | def buildmap(exp, context): |
|
368 | def buildmap(exp, context): | |
369 | darg = compileexp(exp[1], context, methods) |
|
369 | darg = compileexp(exp[1], context, methods) | |
370 | targ = gettemplate(exp[2], context) |
|
370 | targ = gettemplate(exp[2], context) | |
371 | return (templateutil.runmap, (darg, targ)) |
|
371 | return (templateutil.runmap, (darg, targ)) | |
372 |
|
372 | |||
373 | def buildmember(exp, context): |
|
373 | def buildmember(exp, context): | |
374 | darg = compileexp(exp[1], context, methods) |
|
374 | darg = compileexp(exp[1], context, methods) | |
375 | memb = getsymbol(exp[2]) |
|
375 | memb = getsymbol(exp[2]) | |
376 | return (templateutil.runmember, (darg, memb)) |
|
376 | return (templateutil.runmember, (darg, memb)) | |
377 |
|
377 | |||
378 | def buildnegate(exp, context): |
|
378 | def buildnegate(exp, context): | |
379 | arg = compileexp(exp[1], context, exprmethods) |
|
379 | arg = compileexp(exp[1], context, exprmethods) | |
380 | return (templateutil.runnegate, arg) |
|
380 | return (templateutil.runnegate, arg) | |
381 |
|
381 | |||
382 | def buildarithmetic(exp, context, func): |
|
382 | def buildarithmetic(exp, context, func): | |
383 | left = compileexp(exp[1], context, exprmethods) |
|
383 | left = compileexp(exp[1], context, exprmethods) | |
384 | right = compileexp(exp[2], context, exprmethods) |
|
384 | right = compileexp(exp[2], context, exprmethods) | |
385 | return (templateutil.runarithmetic, (func, left, right)) |
|
385 | return (templateutil.runarithmetic, (func, left, right)) | |
386 |
|
386 | |||
387 | def buildfunc(exp, context): |
|
387 | def buildfunc(exp, context): | |
388 | n = getsymbol(exp[1]) |
|
388 | n = getsymbol(exp[1]) | |
389 | if n in context._funcs: |
|
389 | if n in context._funcs: | |
390 | f = context._funcs[n] |
|
390 | f = context._funcs[n] | |
391 | args = _buildfuncargs(exp[2], context, exprmethods, n, f._argspec) |
|
391 | args = _buildfuncargs(exp[2], context, exprmethods, n, f._argspec) | |
392 | return (f, args) |
|
392 | return (f, args) | |
393 | if n in context._filters: |
|
393 | if n in context._filters: | |
394 | args = _buildfuncargs(exp[2], context, exprmethods, n, argspec=None) |
|
394 | args = _buildfuncargs(exp[2], context, exprmethods, n, argspec=None) | |
395 | if len(args) != 1: |
|
395 | if len(args) != 1: | |
396 | raise error.ParseError(_("filter %s expects one argument") % n) |
|
396 | raise error.ParseError(_("filter %s expects one argument") % n) | |
397 | f = context._filters[n] |
|
397 | f = context._filters[n] | |
398 | return (templateutil.runfilter, (args[0], f)) |
|
398 | return (templateutil.runfilter, (args[0], f)) | |
399 | raise error.ParseError(_("unknown function '%s'") % n) |
|
399 | raise error.ParseError(_("unknown function '%s'") % n) | |
400 |
|
400 | |||
def _buildfuncargs(exp, context, curmethods, funcname, argspec):
    """Compile parsed tree of function arguments into list or dict of
    (func, data) pairs

    >>> context = engine(lambda t: (templateutil.runsymbol, t))
    >>> def fargs(expr, argspec):
    ...     x = _parseexpr(expr)
    ...     n = getsymbol(x[1])
    ...     return _buildfuncargs(x[2], context, exprmethods, n, argspec)
    >>> list(fargs(b'a(l=1, k=2)', b'k l m').keys())
    ['l', 'k']
    >>> args = fargs(b'a(opts=1, k=2)', b'**opts')
    >>> list(args.keys()), list(args[b'opts'].keys())
    (['opts'], ['opts', 'k'])
    """
    def compiledict(xs):
        # compile each value expression, preserving caller-specified order
        return util.sortdict((k, compileexp(x, context, curmethods))
                             for k, x in xs.iteritems())
    def compilelist(xs):
        return [compileexp(x, context, curmethods) for x in xs]

    if not argspec:
        # filter or function with no argspec: return list of positional args
        return compilelist(getlist(exp))

    # function with argspec: return dict of named args
    _poskeys, varkey, _keys, optkey = argspec = parser.splitargspec(argspec)
    treeargs = parser.buildargsdict(getlist(exp), funcname, argspec,
                                    keyvaluenode='keyvalue', keynode='symbol')
    compargs = util.sortdict()
    if varkey:
        # variadic positional arguments (*key) compile to a list
        compargs[varkey] = compilelist(treeargs.pop(varkey))
    if optkey:
        # optional keyword arguments (**key) compile to a nested dict
        compargs[optkey] = compiledict(treeargs.pop(optkey))
    compargs.update(compiledict(treeargs))
    return compargs
437 |
|
437 | |||
def buildkeyvaluepair(exp, content):
    """Reject a bare key=value pair outside a function argument list."""
    raise error.ParseError(_("can't use a key-value pair in this context"))
440 |
|
440 | |||
# methods to interpret function arguments or inner expressions (e.g. {_(x)})
# maps parse-tree node tags to (compiler, payload) builders
exprmethods = {
    # leaf nodes: pair the literal payload with its runtime runner
    "integer": lambda e, c: (templateutil.runinteger, e[1]),
    "string": lambda e, c: (templateutil.runstring, e[1]),
    "symbol": lambda e, c: (templateutil.runsymbol, e[1]),
    "template": buildtemplate,
    # parenthesized group: compile the inner expression in place
    "group": lambda e, c: compileexp(e[1], c, exprmethods),
    ".": buildmember,
    "|": buildfilter,
    "%": buildmap,
    "func": buildfunc,
    "keyvalue": buildkeyvaluepair,
    # arithmetic; note '/' is floor division (integer semantics)
    "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
    "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
    "negate": buildnegate,
    "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
    "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
    }
459 |
|
459 | |||
# methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
# identical to exprmethods except that a bare integer is a symbol lookup
methods = exprmethods.copy()
methods["integer"] = exprmethods["symbol"]  # '{1}' as variable
463 |
|
463 | |||
class _aliasrules(parser.basealiasrules):
    """Parsing and expansion rule set of template aliases"""
    _section = _('template alias')
    _parse = staticmethod(_parseexpr)

    @staticmethod
    def _trygetfunc(tree):
        """Return (name, args) if tree is func(...) or ...|filter; otherwise
        None"""
        if tree[0] == 'func' and tree[1][0] == 'symbol':
            # function call node: ('func', ('symbol', name), argtree)
            return tree[1][1], getlist(tree[2])
        if tree[0] == '|' and tree[2][0] == 'symbol':
            # filter application: ('|', input, ('symbol', name))
            return tree[2][1], [tree[1]]
        # implicit None: tree is neither form
477 |
|
477 | |||
def expandaliases(tree, aliases):
    """Return a new parse tree with all template aliases expanded"""
    return _aliasrules.expand(_aliasrules.buildmap(aliases), tree)
482 |
|
482 | |||
483 | # template engine |
|
483 | # template engine | |
484 |
|
484 | |||
def _flatten(thing):
    '''yield a single stream from a possibly nested set of iterators'''
    thing = templateutil.unwraphybrid(thing)
    if isinstance(thing, bytes):
        yield thing
    elif isinstance(thing, str):
        # We can only hit this on Python 3, and it's here to guard
        # against infinite recursion.
        raise error.ProgrammingError('Mercurial IO including templates is done'
                                     ' with bytes, not strings, got %r' % thing)
    elif thing is None:
        # None is silently dropped from the output stream
        pass
    elif not util.safehasattr(thing, '__iter__'):
        # scalar value (e.g. int): coerce to bytes
        yield pycompat.bytestr(thing)
    else:
        for i in thing:
            # each item may itself be a hybrid wrapper or nested iterable
            i = templateutil.unwraphybrid(i)
            if isinstance(i, bytes):
                yield i
            elif i is None:
                pass
            elif not util.safehasattr(i, '__iter__'):
                yield pycompat.bytestr(i)
            else:
                # recurse to handle arbitrarily deep nesting
                for j in _flatten(i):
                    yield j
511 |
|
511 | |||
def unquotestring(s):
    '''unwrap a matched pair of quotes, if any; otherwise return s as-is'''
    if len(s) >= 2 and s[0] == s[-1] and s[0] in "'\"":
        return s[1:-1]
    return s
517 |
|
517 | |||
class engine(object):
    '''template expansion engine.

    template expansion works like this. a map file contains key=value
    pairs. if value is quoted, it is treated as string. otherwise, it
    is treated as name of template file.

    templater is asked to expand a key in map. it looks up key, and
    looks for strings like this: {foo}. it expands {foo} by looking up
    foo in map, and substituting it. expansion is recursive: it stops
    when there is no more {foo} to replace.

    expansion also allows formatting and filtering.

    format uses key to expand each item in list. syntax is
    {key%format}.

    filter uses function to transform value. syntax is
    {key|filter1|filter2|...}.'''

    def __init__(self, loader, filters=None, defaults=None, resources=None,
                 aliases=()):
        # loader is a callable returning raw template text for a name
        self._loader = loader
        if filters is None:
            filters = {}
        self._filters = filters
        self._funcs = templatefuncs.funcs  # make this a parameter if needed
        if defaults is None:
            defaults = {}
        if resources is None:
            resources = {}
        self._defaults = defaults
        self._resources = resources
        self._aliasmap = _aliasrules.buildmap(aliases)
        self._cache = {}  # key: (func, data)

    def symbol(self, mapping, key):
        """Resolve symbol to value or function; None if nothing found"""
        v = None
        # resource names are reserved; never satisfied from mapping/defaults
        if key not in self._resources:
            v = mapping.get(key)
        if v is None:
            v = self._defaults.get(key)
        return v

    def resource(self, mapping, key):
        """Return internal data (e.g. cache) used for keyword/function
        evaluation"""
        v = None
        if key in self._resources:
            # resource providers are callables of (context, mapping, key)
            v = self._resources[key](self, mapping, key)
        if v is None:
            raise templateutil.ResourceUnavailable(
                _('template resource not available: %s') % key)
        return v

    def _load(self, t):
        '''load, parse, and cache a template'''
        if t not in self._cache:
            # put poison to cut recursion while compiling 't'
            self._cache[t] = (_runrecursivesymbol, t)
            try:
                x = parse(self._loader(t))
                if self._aliasmap:
                    # apply template aliases before compilation
                    x = _aliasrules.expand(self._aliasmap, x)
                self._cache[t] = compileexp(x, self, methods)
            except:  # re-raises
                # drop the poison entry so a later call can retry
                del self._cache[t]
                raise
        return self._cache[t]

    def process(self, t, mapping):
        '''Perform expansion. t is name of map element to expand.
        mapping contains added elements for use during expansion. Is a
        generator.'''
        func, data = self._load(t)
        # flatten nested iterators into a stream of bytes chunks
        return _flatten(func(self, mapping, data))
595 |
|
595 | |||
# registry of template engine classes by name; a map file entry may select
# a non-default engine (see templater.__call__)
engines = {'default': engine}
597 |
|
597 | |||
def stylelist():
    """Return a comma-separated, sorted list of available style names.

    Styles are discovered as ``map-cmdline.<name>`` files in the first
    template path; editor/patch backup files (``.orig``/``.rej``) are
    skipped. Returns a help message when no template path exists.
    """
    paths = templatepaths()
    if not paths:
        return _('no templates found, try `hg debuginstall` for more info')
    # renamed locals: original shadowed the builtin 'file' and this
    # function's own name 'stylelist'
    styles = []
    for filename in os.listdir(paths[0]):
        parts = filename.split(".")
        if parts[-1] in ('orig', 'rej'):
            continue
        if parts[0] == "map-cmdline":
            styles.append(parts[1])
    return ", ".join(sorted(styles))
611 |
|
611 | |||
def _readmapfile(mapfile):
    """Load template elements from the given map file

    Returns (cache, tmap, aliases): preloaded template fragments,
    a map of template name -> (engine type, file path), and template
    alias pairs.
    """
    if not os.path.exists(mapfile):
        raise error.Abort(_("style '%s' not found") % mapfile,
                          hint=_("available styles: %s") % stylelist())

    base = os.path.dirname(mapfile)
    conf = config.config(includepaths=templatepaths())
    conf.read(mapfile, remap={'': 'templates'})

    cache = {}
    tmap = {}
    aliases = []

    val = conf.get('templates', '__base__')
    if val and val[0] not in "'\"":
        # treat as a pointer to a base class for this style
        path = util.normpath(os.path.join(base, val))

        # fallback check in template paths
        if not os.path.exists(path):
            for p in templatepaths():
                # try the value as a file, then as a directory holding 'map'
                p2 = util.normpath(os.path.join(p, val))
                if os.path.isfile(p2):
                    path = p2
                    break
                p3 = util.normpath(os.path.join(p2, "map"))
                if os.path.isfile(p3):
                    path = p3
                    break

        # recursively load the base style; entries below override it
        cache, tmap, aliases = _readmapfile(path)

    for key, val in conf['templates'].items():
        if not val:
            raise error.ParseError(_('missing value'),
                                   conf.source('templates', key))
        if val[0] in "'\"":
            # quoted value: an inline template fragment
            if val[0] != val[-1]:
                raise error.ParseError(_('unmatched quotes'),
                                       conf.source('templates', key))
            cache[key] = unquotestring(val)
        elif key != '__base__':
            # unquoted value: "[engine:]relative/path" to a template file
            val = 'default', val
            if ':' in val[1]:
                val = val[1].split(':', 1)
            tmap[key] = val[0], os.path.join(base, val[1])
    aliases.extend(conf['templatealias'].items())
    return cache, tmap, aliases
661 |
|
661 | |||
class templater(object):
    """Frontend that loads template fragments (from a map file or a
    preloaded cache) and renders them through a per-type engine."""

    def __init__(self, filters=None, defaults=None, resources=None,
                 cache=None, aliases=(), minchunk=1024, maxchunk=65536):
        """Create template engine optionally with preloaded template fragments

        - ``filters``: a dict of functions to transform a value into another.
        - ``defaults``: a dict of symbol values/functions; may be overridden
          by a ``mapping`` dict.
        - ``resources``: a dict of functions returning internal data
          (e.g. cache), inaccessible from user template.
        - ``cache``: a dict of preloaded template fragments.
        - ``aliases``: a list of alias (name, replacement) pairs.

        self.cache may be updated later to register additional template
        fragments.
        """
        if filters is None:
            filters = {}
        if defaults is None:
            defaults = {}
        if resources is None:
            resources = {}
        if cache is None:
            cache = {}
        self.cache = cache.copy()
        self.map = {}
        # built-in filters, extended (and possibly overridden) by callers
        self.filters = templatefilters.filters.copy()
        self.filters.update(filters)
        self.defaults = defaults
        # 'templ' resource gives template keywords access to this templater
        self._resources = {'templ': lambda context, mapping, key: self}
        self._resources.update(resources)
        self._aliases = aliases
        self.minchunk, self.maxchunk = minchunk, maxchunk
        self.ecache = {}  # engine type -> engine instance

    @classmethod
    def frommapfile(cls, mapfile, filters=None, defaults=None, resources=None,
                    cache=None, minchunk=1024, maxchunk=65536):
        """Create templater from the specified map file"""
        t = cls(filters, defaults, resources, cache, [], minchunk, maxchunk)
        cache, tmap, aliases = _readmapfile(mapfile)
        t.cache.update(cache)
        t.map = tmap
        t._aliases = aliases
        return t

    def __contains__(self, key):
        return key in self.cache or key in self.map

    def load(self, t):
        '''Get the template for the given template name. Use a local cache.'''
        if t not in self.cache:
            try:
                self.cache[t] = util.readfile(self.map[t][1])
            except KeyError as inst:
                raise templateutil.TemplateNotFound(
                    _('"%s" not in template map') % inst.args[0])
            except IOError as inst:
                # re-raise with the template file name in the message
                reason = (_('template file %s: %s')
                          % (self.map[t][1], util.forcebytestr(inst.args[1])))
                raise IOError(inst.args[0], encoding.strfromlocal(reason))
        return self.cache[t]

    def renderdefault(self, mapping):
        """Render the default unnamed template and return result as string"""
        return self.render('', mapping)

    def render(self, t, mapping):
        """Render the specified named template and return result as string"""
        mapping = pycompat.strkwargs(mapping)
        return templateutil.stringify(self(t, **mapping))

    def __call__(self, t, **mapping):
        mapping = pycompat.byteskwargs(mapping)
        # engine type comes from the map file entry, 'default' otherwise
        ttype = t in self.map and self.map[t][0] or 'default'
        if ttype not in self.ecache:
            try:
                ecls = engines[ttype]
            except KeyError:
                raise error.Abort(_('invalid template engine: %s') % ttype)
            # engines are instantiated lazily and cached per type
            self.ecache[ttype] = ecls(self.load, self.filters, self.defaults,
                                      self._resources, self._aliases)
        proc = self.ecache[ttype]

        stream = proc.process(t, mapping)
        if self.minchunk:
            # coalesce output into progressively larger chunks
            stream = util.increasingchunks(stream, min=self.minchunk,
                                           max=self.maxchunk)
        return stream
748 |
|
752 | |||
def templatepaths():
    '''Return the list of existing directories searched for template files.'''
    candidates = []
    for rel in ['templates']:
        candidates.append(os.path.normpath(os.path.join(util.datapath, rel)))
    return [d for d in candidates if os.path.isdir(d)]
755 |
|
759 | |||
def templatepath(name):
    '''Return the full path of the named template file, or None if absent.'''
    for directory in templatepaths():
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return None
763 |
|
767 | |||
def stylemap(styles, paths=None):
    """Return path to mapfile for a given style.

    Searches mapfile in the following locations:
    1. templatepath/style/map
    2. templatepath/map-style
    3. templatepath/map
    """

    if paths is None:
        paths = templatepaths()
    elif isinstance(paths, bytes):
        # accept a single path as well as a list
        paths = [paths]

    if isinstance(styles, bytes):
        # accept a single style name as well as a list of candidates
        styles = [styles]

    for style in styles:
        # only plain name is allowed to honor template paths
        # (reject empty names, '.'/'..' and anything containing a separator)
        if (not style
            or style in (pycompat.oscurdir, pycompat.ospardir)
            or pycompat.ossep in style
            or pycompat.osaltsep and pycompat.osaltsep in style):
            continue
        locations = [os.path.join(style, 'map'), 'map-' + style]
        locations.append('map')

        for path in paths:
            for location in locations:
                mapfile = os.path.join(path, location)
                if os.path.isfile(mapfile):
                    return style, mapfile

    raise RuntimeError("No hgweb templates found in %r" % paths)
General Comments 0
You need to be logged in to leave comments.
Login now