Show More
@@ -1,579 +1,577 b'' | |||
|
1 | 1 | # formatter.py - generic output formatting for mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2012 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | """Generic output formatting for Mercurial |
|
9 | 9 | |
|
10 | 10 | The formatter provides API to show data in various ways. The following |
|
11 | 11 | functions should be used in place of ui.write(): |
|
12 | 12 | |
|
13 | 13 | - fm.write() for unconditional output |
|
14 | 14 | - fm.condwrite() to show some extra data conditionally in plain output |
|
15 | 15 | - fm.context() to provide changectx to template output |
|
16 | 16 | - fm.data() to provide extra data to JSON or template output |
|
17 | 17 | - fm.plain() to show raw text that isn't provided to JSON or template output |
|
18 | 18 | |
|
19 | 19 | To show structured data (e.g. date tuples, dicts, lists), apply fm.format*() |
|
20 | 20 | beforehand so the data is converted to the appropriate data type. Use |
|
21 | 21 | fm.isplain() if you need to convert or format data conditionally which isn't |
|
22 | 22 | supported by the formatter API. |
|
23 | 23 | |
|
24 | 24 | To build nested structure (i.e. a list of dicts), use fm.nested(). |
|
25 | 25 | |
|
26 | 26 | See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan |
|
27 | 27 | |
|
28 | 28 | fm.condwrite() vs 'if cond:': |
|
29 | 29 | |
|
30 | 30 | In most cases, use fm.condwrite() so users can selectively show the data |
|
31 | 31 | in template output. If it's costly to build data, use plain 'if cond:' with |
|
32 | 32 | fm.write(). |
|
33 | 33 | |
|
34 | 34 | fm.nested() vs fm.formatdict() (or fm.formatlist()): |
|
35 | 35 | |
|
36 | 36 | fm.nested() should be used to form a tree structure (a list of dicts of |
|
37 | 37 | lists of dicts...) which can be accessed through template keywords, e.g. |
|
38 | 38 | "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict() |
|
39 | 39 | exports a dict-type object to template, which can be accessed by e.g. |
|
40 | 40 | "{get(foo, key)}" function. |
|
41 | 41 | |
|
42 | 42 | Doctest helper: |
|
43 | 43 | |
|
44 | 44 | >>> def show(fn, verbose=False, **opts): |
|
45 | 45 | ... import sys |
|
46 | 46 | ... from . import ui as uimod |
|
47 | 47 | ... ui = uimod.ui() |
|
48 | 48 | ... ui.verbose = verbose |
|
49 | 49 | ... ui.pushbuffer() |
|
50 | 50 | ... try: |
|
51 | 51 | ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__), |
|
52 | 52 | ... pycompat.byteskwargs(opts))) |
|
53 | 53 | ... finally: |
|
54 | 54 | ... print(pycompat.sysstr(ui.popbuffer()), end='') |
|
55 | 55 | |
|
56 | 56 | Basic example: |
|
57 | 57 | |
|
58 | 58 | >>> def files(ui, fm): |
|
59 | 59 | ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))] |
|
60 | 60 | ... for f in files: |
|
61 | 61 | ... fm.startitem() |
|
62 | 62 | ... fm.write(b'path', b'%s', f[0]) |
|
63 | 63 | ... fm.condwrite(ui.verbose, b'date', b' %s', |
|
64 | 64 | ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S')) |
|
65 | 65 | ... fm.data(size=f[1]) |
|
66 | 66 | ... fm.plain(b'\\n') |
|
67 | 67 | ... fm.end() |
|
68 | 68 | >>> show(files) |
|
69 | 69 | foo |
|
70 | 70 | bar |
|
71 | 71 | >>> show(files, verbose=True) |
|
72 | 72 | foo 1970-01-01 00:00:00 |
|
73 | 73 | bar 1970-01-01 00:00:01 |
|
74 | 74 | >>> show(files, template=b'json') |
|
75 | 75 | [ |
|
76 | 76 | { |
|
77 | 77 | "date": [0, 0], |
|
78 | 78 | "path": "foo", |
|
79 | 79 | "size": 123 |
|
80 | 80 | }, |
|
81 | 81 | { |
|
82 | 82 | "date": [1, 0], |
|
83 | 83 | "path": "bar", |
|
84 | 84 | "size": 456 |
|
85 | 85 | } |
|
86 | 86 | ] |
|
87 | 87 | >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n') |
|
88 | 88 | path: foo |
|
89 | 89 | date: 1970-01-01T00:00:00+00:00 |
|
90 | 90 | path: bar |
|
91 | 91 | date: 1970-01-01T00:00:01+00:00 |
|
92 | 92 | |
|
93 | 93 | Nested example: |
|
94 | 94 | |
|
95 | 95 | >>> def subrepos(ui, fm): |
|
96 | 96 | ... fm.startitem() |
|
97 | 97 | ... fm.write(b'reponame', b'[%s]\\n', b'baz') |
|
98 | 98 | ... files(ui, fm.nested(b'files')) |
|
99 | 99 | ... fm.end() |
|
100 | 100 | >>> show(subrepos) |
|
101 | 101 | [baz] |
|
102 | 102 | foo |
|
103 | 103 | bar |
|
104 | 104 | >>> show(subrepos, template=b'{reponame}: {join(files % "{path}", ", ")}\\n') |
|
105 | 105 | baz: foo, bar |
|
106 | 106 | """ |
|
107 | 107 | |
|
108 | 108 | from __future__ import absolute_import, print_function |
|
109 | 109 | |
|
110 | 110 | import collections |
|
111 | 111 | import contextlib |
|
112 | 112 | import itertools |
|
113 | 113 | import os |
|
114 | 114 | |
|
115 | 115 | from .i18n import _ |
|
116 | 116 | from .node import ( |
|
117 | 117 | hex, |
|
118 | 118 | short, |
|
119 | 119 | ) |
|
120 | 120 | |
|
121 | 121 | from . import ( |
|
122 | 122 | error, |
|
123 | 123 | pycompat, |
|
124 | 124 | templatefilters, |
|
125 | 125 | templatekw, |
|
126 | 126 | templater, |
|
127 | 127 | templateutil, |
|
128 | 128 | util, |
|
129 | 129 | ) |
|
130 | 130 | from .utils import dateutil |
|
131 | 131 | |
|
132 | 132 | pickle = util.pickle |
|
133 | 133 | |
|
134 | 134 | class _nullconverter(object): |
|
135 | 135 | '''convert non-primitive data types to be processed by formatter''' |
|
136 | 136 | |
|
137 | 137 | # set to True if context object should be stored as item |
|
138 | 138 | storecontext = False |
|
139 | 139 | |
|
140 | 140 | @staticmethod |
|
141 | 141 | def formatdate(date, fmt): |
|
142 | 142 | '''convert date tuple to appropriate format''' |
|
143 | 143 | return date |
|
144 | 144 | @staticmethod |
|
145 | 145 | def formatdict(data, key, value, fmt, sep): |
|
146 | 146 | '''convert dict or key-value pairs to appropriate dict format''' |
|
147 | 147 | # use plain dict instead of util.sortdict so that data can be |
|
148 | 148 | # serialized as a builtin dict in pickle output |
|
149 | 149 | return dict(data) |
|
150 | 150 | @staticmethod |
|
151 | 151 | def formatlist(data, name, fmt, sep): |
|
152 | 152 | '''convert iterable to appropriate list format''' |
|
153 | 153 | return list(data) |
|
154 | 154 | |
|
155 | 155 | class baseformatter(object): |
|
156 | 156 | def __init__(self, ui, topic, opts, converter): |
|
157 | 157 | self._ui = ui |
|
158 | 158 | self._topic = topic |
|
159 | 159 | self._style = opts.get("style") |
|
160 | 160 | self._template = opts.get("template") |
|
161 | 161 | self._converter = converter |
|
162 | 162 | self._item = None |
|
163 | 163 | # function to convert node to string suitable for this output |
|
164 | 164 | self.hexfunc = hex |
|
165 | 165 | def __enter__(self): |
|
166 | 166 | return self |
|
167 | 167 | def __exit__(self, exctype, excvalue, traceback): |
|
168 | 168 | if exctype is None: |
|
169 | 169 | self.end() |
|
170 | 170 | def _showitem(self): |
|
171 | 171 | '''show a formatted item once all data is collected''' |
|
172 | 172 | def startitem(self): |
|
173 | 173 | '''begin an item in the format list''' |
|
174 | 174 | if self._item is not None: |
|
175 | 175 | self._showitem() |
|
176 | 176 | self._item = {} |
|
177 | 177 | def formatdate(self, date, fmt='%a %b %d %H:%M:%S %Y %1%2'): |
|
178 | 178 | '''convert date tuple to appropriate format''' |
|
179 | 179 | return self._converter.formatdate(date, fmt) |
|
180 | 180 | def formatdict(self, data, key='key', value='value', fmt=None, sep=' '): |
|
181 | 181 | '''convert dict or key-value pairs to appropriate dict format''' |
|
182 | 182 | return self._converter.formatdict(data, key, value, fmt, sep) |
|
183 | 183 | def formatlist(self, data, name, fmt=None, sep=' '): |
|
184 | 184 | '''convert iterable to appropriate list format''' |
|
185 | 185 | # name is mandatory argument for now, but it could be optional if |
|
186 | 186 | # we have default template keyword, e.g. {item} |
|
187 | 187 | return self._converter.formatlist(data, name, fmt, sep) |
|
188 | 188 | def context(self, **ctxs): |
|
189 | 189 | '''insert context objects to be used to render template keywords''' |
|
190 | 190 | ctxs = pycompat.byteskwargs(ctxs) |
|
191 | 191 | assert all(k in {'ctx', 'fctx'} for k in ctxs) |
|
192 | 192 | if self._converter.storecontext: |
|
193 | 193 | self._item.update(ctxs) |
|
194 | 194 | def data(self, **data): |
|
195 | 195 | '''insert data into item that's not shown in default output''' |
|
196 | 196 | data = pycompat.byteskwargs(data) |
|
197 | 197 | self._item.update(data) |
|
198 | 198 | def write(self, fields, deftext, *fielddata, **opts): |
|
199 | 199 | '''do default text output while assigning data to item''' |
|
200 | 200 | fieldkeys = fields.split() |
|
201 | 201 | assert len(fieldkeys) == len(fielddata) |
|
202 | 202 | self._item.update(zip(fieldkeys, fielddata)) |
|
203 | 203 | def condwrite(self, cond, fields, deftext, *fielddata, **opts): |
|
204 | 204 | '''do conditional write (primarily for plain formatter)''' |
|
205 | 205 | fieldkeys = fields.split() |
|
206 | 206 | assert len(fieldkeys) == len(fielddata) |
|
207 | 207 | self._item.update(zip(fieldkeys, fielddata)) |
|
208 | 208 | def plain(self, text, **opts): |
|
209 | 209 | '''show raw text for non-templated mode''' |
|
210 | 210 | def isplain(self): |
|
211 | 211 | '''check for plain formatter usage''' |
|
212 | 212 | return False |
|
213 | 213 | def nested(self, field): |
|
214 | 214 | '''sub formatter to store nested data in the specified field''' |
|
215 | 215 | self._item[field] = data = [] |
|
216 | 216 | return _nestedformatter(self._ui, self._converter, data) |
|
217 | 217 | def end(self): |
|
218 | 218 | '''end output for the formatter''' |
|
219 | 219 | if self._item is not None: |
|
220 | 220 | self._showitem() |
|
221 | 221 | |
|
222 | 222 | def nullformatter(ui, topic): |
|
223 | 223 | '''formatter that prints nothing''' |
|
224 | 224 | return baseformatter(ui, topic, opts={}, converter=_nullconverter) |
|
225 | 225 | |
|
226 | 226 | class _nestedformatter(baseformatter): |
|
227 | 227 | '''build sub items and store them in the parent formatter''' |
|
228 | 228 | def __init__(self, ui, converter, data): |
|
229 | 229 | baseformatter.__init__(self, ui, topic='', opts={}, converter=converter) |
|
230 | 230 | self._data = data |
|
231 | 231 | def _showitem(self): |
|
232 | 232 | self._data.append(self._item) |
|
233 | 233 | |
|
234 | 234 | def _iteritems(data): |
|
235 | 235 | '''iterate key-value pairs in stable order''' |
|
236 | 236 | if isinstance(data, dict): |
|
237 | 237 | return sorted(data.iteritems()) |
|
238 | 238 | return data |
|
239 | 239 | |
|
240 | 240 | class _plainconverter(object): |
|
241 | 241 | '''convert non-primitive data types to text''' |
|
242 | 242 | |
|
243 | 243 | storecontext = False |
|
244 | 244 | |
|
245 | 245 | @staticmethod |
|
246 | 246 | def formatdate(date, fmt): |
|
247 | 247 | '''stringify date tuple in the given format''' |
|
248 | 248 | return dateutil.datestr(date, fmt) |
|
249 | 249 | @staticmethod |
|
250 | 250 | def formatdict(data, key, value, fmt, sep): |
|
251 | 251 | '''stringify key-value pairs separated by sep''' |
|
252 | 252 | prefmt = pycompat.identity |
|
253 | 253 | if fmt is None: |
|
254 | 254 | fmt = '%s=%s' |
|
255 | 255 | prefmt = pycompat.bytestr |
|
256 | 256 | return sep.join(fmt % (prefmt(k), prefmt(v)) |
|
257 | 257 | for k, v in _iteritems(data)) |
|
258 | 258 | @staticmethod |
|
259 | 259 | def formatlist(data, name, fmt, sep): |
|
260 | 260 | '''stringify iterable separated by sep''' |
|
261 | 261 | prefmt = pycompat.identity |
|
262 | 262 | if fmt is None: |
|
263 | 263 | fmt = '%s' |
|
264 | 264 | prefmt = pycompat.bytestr |
|
265 | 265 | return sep.join(fmt % prefmt(e) for e in data) |
|
266 | 266 | |
|
267 | 267 | class plainformatter(baseformatter): |
|
268 | 268 | '''the default text output scheme''' |
|
269 | 269 | def __init__(self, ui, out, topic, opts): |
|
270 | 270 | baseformatter.__init__(self, ui, topic, opts, _plainconverter) |
|
271 | 271 | if ui.debugflag: |
|
272 | 272 | self.hexfunc = hex |
|
273 | 273 | else: |
|
274 | 274 | self.hexfunc = short |
|
275 | 275 | if ui is out: |
|
276 | 276 | self._write = ui.write |
|
277 | 277 | else: |
|
278 | 278 | self._write = lambda s, **opts: out.write(s) |
|
279 | 279 | def startitem(self): |
|
280 | 280 | pass |
|
281 | 281 | def data(self, **data): |
|
282 | 282 | pass |
|
283 | 283 | def write(self, fields, deftext, *fielddata, **opts): |
|
284 | 284 | self._write(deftext % fielddata, **opts) |
|
285 | 285 | def condwrite(self, cond, fields, deftext, *fielddata, **opts): |
|
286 | 286 | '''do conditional write''' |
|
287 | 287 | if cond: |
|
288 | 288 | self._write(deftext % fielddata, **opts) |
|
289 | 289 | def plain(self, text, **opts): |
|
290 | 290 | self._write(text, **opts) |
|
291 | 291 | def isplain(self): |
|
292 | 292 | return True |
|
293 | 293 | def nested(self, field): |
|
294 | 294 | # nested data will be directly written to ui |
|
295 | 295 | return self |
|
296 | 296 | def end(self): |
|
297 | 297 | pass |
|
298 | 298 | |
|
299 | 299 | class debugformatter(baseformatter): |
|
300 | 300 | def __init__(self, ui, out, topic, opts): |
|
301 | 301 | baseformatter.__init__(self, ui, topic, opts, _nullconverter) |
|
302 | 302 | self._out = out |
|
303 | 303 | self._out.write("%s = [\n" % self._topic) |
|
304 | 304 | def _showitem(self): |
|
305 | 305 | self._out.write(' %s,\n' % pycompat.byterepr(self._item)) |
|
306 | 306 | def end(self): |
|
307 | 307 | baseformatter.end(self) |
|
308 | 308 | self._out.write("]\n") |
|
309 | 309 | |
|
310 | 310 | class pickleformatter(baseformatter): |
|
311 | 311 | def __init__(self, ui, out, topic, opts): |
|
312 | 312 | baseformatter.__init__(self, ui, topic, opts, _nullconverter) |
|
313 | 313 | self._out = out |
|
314 | 314 | self._data = [] |
|
315 | 315 | def _showitem(self): |
|
316 | 316 | self._data.append(self._item) |
|
317 | 317 | def end(self): |
|
318 | 318 | baseformatter.end(self) |
|
319 | 319 | self._out.write(pickle.dumps(self._data)) |
|
320 | 320 | |
|
321 | 321 | class jsonformatter(baseformatter): |
|
322 | 322 | def __init__(self, ui, out, topic, opts): |
|
323 | 323 | baseformatter.__init__(self, ui, topic, opts, _nullconverter) |
|
324 | 324 | self._out = out |
|
325 | 325 | self._out.write("[") |
|
326 | 326 | self._first = True |
|
327 | 327 | def _showitem(self): |
|
328 | 328 | if self._first: |
|
329 | 329 | self._first = False |
|
330 | 330 | else: |
|
331 | 331 | self._out.write(",") |
|
332 | 332 | |
|
333 | 333 | self._out.write("\n {\n") |
|
334 | 334 | first = True |
|
335 | 335 | for k, v in sorted(self._item.items()): |
|
336 | 336 | if first: |
|
337 | 337 | first = False |
|
338 | 338 | else: |
|
339 | 339 | self._out.write(",\n") |
|
340 | 340 | u = templatefilters.json(v, paranoid=False) |
|
341 | 341 | self._out.write(' "%s": %s' % (k, u)) |
|
342 | 342 | self._out.write("\n }") |
|
343 | 343 | def end(self): |
|
344 | 344 | baseformatter.end(self) |
|
345 | 345 | self._out.write("\n]\n") |
|
346 | 346 | |
|
347 | 347 | class _templateconverter(object): |
|
348 | 348 | '''convert non-primitive data types to be processed by templater''' |
|
349 | 349 | |
|
350 | 350 | storecontext = True |
|
351 | 351 | |
|
352 | 352 | @staticmethod |
|
353 | 353 | def formatdate(date, fmt): |
|
354 | 354 | '''return date tuple''' |
|
355 | 355 | return date |
|
356 | 356 | @staticmethod |
|
357 | 357 | def formatdict(data, key, value, fmt, sep): |
|
358 | 358 | '''build object that can be evaluated as either plain string or dict''' |
|
359 | 359 | data = util.sortdict(_iteritems(data)) |
|
360 | 360 | def f(): |
|
361 | 361 | yield _plainconverter.formatdict(data, key, value, fmt, sep) |
|
362 | 362 | return templateutil.hybriddict(data, key=key, value=value, fmt=fmt, |
|
363 | 363 | gen=f) |
|
364 | 364 | @staticmethod |
|
365 | 365 | def formatlist(data, name, fmt, sep): |
|
366 | 366 | '''build object that can be evaluated as either plain string or list''' |
|
367 | 367 | data = list(data) |
|
368 | 368 | def f(): |
|
369 | 369 | yield _plainconverter.formatlist(data, name, fmt, sep) |
|
370 | 370 | return templateutil.hybridlist(data, name=name, fmt=fmt, gen=f) |
|
371 | 371 | |
|
372 | 372 | class templateformatter(baseformatter): |
|
373 | 373 | def __init__(self, ui, out, topic, opts): |
|
374 | 374 | baseformatter.__init__(self, ui, topic, opts, _templateconverter) |
|
375 | 375 | self._out = out |
|
376 | 376 | spec = lookuptemplate(ui, topic, opts.get('template', '')) |
|
377 | 377 | self._tref = spec.ref |
|
378 | 378 | self._t = loadtemplater(ui, spec, defaults=templatekw.keywords, |
|
379 | 379 | resources=templateresources(ui), |
|
380 | 380 | cache=templatekw.defaulttempl) |
|
381 | 381 | self._parts = templatepartsmap(spec, self._t, |
|
382 | 382 | ['docheader', 'docfooter', 'separator']) |
|
383 | 383 | self._counter = itertools.count() |
|
384 | 384 | self._renderitem('docheader', {}) |
|
385 | 385 | |
|
386 | 386 | def _showitem(self): |
|
387 | 387 | item = self._item.copy() |
|
388 | 388 | item['index'] = index = next(self._counter) |
|
389 | 389 | if index > 0: |
|
390 | 390 | self._renderitem('separator', {}) |
|
391 | 391 | self._renderitem(self._tref, item) |
|
392 | 392 | |
|
393 | 393 | def _renderitem(self, part, item): |
|
394 | 394 | if part not in self._parts: |
|
395 | 395 | return |
|
396 | 396 | ref = self._parts[part] |
|
397 | 397 | |
|
398 | 398 | props = {} |
|
399 | 399 | # explicitly-defined fields precede templatekw |
|
400 | 400 | props.update(item) |
|
401 | 401 | if 'ctx' in item or 'fctx' in item: |
|
402 | 402 | # but template resources must be always available |
|
403 | 403 | props['revcache'] = {} |
|
404 | props = pycompat.strkwargs(props) | |
|
405 | g = self._t(ref, **props) | |
|
406 | self._out.write(templateutil.stringify(g)) | |
|
404 | self._out.write(self._t.render(ref, props)) | |
|
407 | 405 | |
|
408 | 406 | def end(self): |
|
409 | 407 | baseformatter.end(self) |
|
410 | 408 | self._renderitem('docfooter', {}) |
|
411 | 409 | |
|
412 | 410 | templatespec = collections.namedtuple(r'templatespec', |
|
413 | 411 | r'ref tmpl mapfile') |
|
414 | 412 | |
|
415 | 413 | def lookuptemplate(ui, topic, tmpl): |
|
416 | 414 | """Find the template matching the given -T/--template spec 'tmpl' |
|
417 | 415 | |
|
418 | 416 | 'tmpl' can be any of the following: |
|
419 | 417 | |
|
420 | 418 | - a literal template (e.g. '{rev}') |
|
421 | 419 | - a map-file name or path (e.g. 'changelog') |
|
422 | 420 | - a reference to [templates] in config file |
|
423 | 421 | - a path to raw template file |
|
424 | 422 | |
|
425 | 423 | A map file defines a stand-alone template environment. If a map file |
|
426 | 424 | selected, all templates defined in the file will be loaded, and the |
|
427 | 425 | template matching the given topic will be rendered. Aliases won't be |
|
428 | 426 | loaded from user config, but from the map file. |
|
429 | 427 | |
|
430 | 428 | If no map file selected, all templates in [templates] section will be |
|
431 | 429 | available as well as aliases in [templatealias]. |
|
432 | 430 | """ |
|
433 | 431 | |
|
434 | 432 | # looks like a literal template? |
|
435 | 433 | if '{' in tmpl: |
|
436 | 434 | return templatespec('', tmpl, None) |
|
437 | 435 | |
|
438 | 436 | # perhaps a stock style? |
|
439 | 437 | if not os.path.split(tmpl)[0]: |
|
440 | 438 | mapname = (templater.templatepath('map-cmdline.' + tmpl) |
|
441 | 439 | or templater.templatepath(tmpl)) |
|
442 | 440 | if mapname and os.path.isfile(mapname): |
|
443 | 441 | return templatespec(topic, None, mapname) |
|
444 | 442 | |
|
445 | 443 | # perhaps it's a reference to [templates] |
|
446 | 444 | if ui.config('templates', tmpl): |
|
447 | 445 | return templatespec(tmpl, None, None) |
|
448 | 446 | |
|
449 | 447 | if tmpl == 'list': |
|
450 | 448 | ui.write(_("available styles: %s\n") % templater.stylelist()) |
|
451 | 449 | raise error.Abort(_("specify a template")) |
|
452 | 450 | |
|
453 | 451 | # perhaps it's a path to a map or a template |
|
454 | 452 | if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl): |
|
455 | 453 | # is it a mapfile for a style? |
|
456 | 454 | if os.path.basename(tmpl).startswith("map-"): |
|
457 | 455 | return templatespec(topic, None, os.path.realpath(tmpl)) |
|
458 | 456 | with util.posixfile(tmpl, 'rb') as f: |
|
459 | 457 | tmpl = f.read() |
|
460 | 458 | return templatespec('', tmpl, None) |
|
461 | 459 | |
|
462 | 460 | # constant string? |
|
463 | 461 | return templatespec('', tmpl, None) |
|
464 | 462 | |
|
465 | 463 | def templatepartsmap(spec, t, partnames): |
|
466 | 464 | """Create a mapping of {part: ref}""" |
|
467 | 465 | partsmap = {spec.ref: spec.ref} # initial ref must exist in t |
|
468 | 466 | if spec.mapfile: |
|
469 | 467 | partsmap.update((p, p) for p in partnames if p in t) |
|
470 | 468 | elif spec.ref: |
|
471 | 469 | for part in partnames: |
|
472 | 470 | ref = '%s:%s' % (spec.ref, part) # select config sub-section |
|
473 | 471 | if ref in t: |
|
474 | 472 | partsmap[part] = ref |
|
475 | 473 | return partsmap |
|
476 | 474 | |
|
477 | 475 | def loadtemplater(ui, spec, defaults=None, resources=None, cache=None): |
|
478 | 476 | """Create a templater from either a literal template or loading from |
|
479 | 477 | a map file""" |
|
480 | 478 | assert not (spec.tmpl and spec.mapfile) |
|
481 | 479 | if spec.mapfile: |
|
482 | 480 | frommapfile = templater.templater.frommapfile |
|
483 | 481 | return frommapfile(spec.mapfile, defaults=defaults, resources=resources, |
|
484 | 482 | cache=cache) |
|
485 | 483 | return maketemplater(ui, spec.tmpl, defaults=defaults, resources=resources, |
|
486 | 484 | cache=cache) |
|
487 | 485 | |
|
488 | 486 | def maketemplater(ui, tmpl, defaults=None, resources=None, cache=None): |
|
489 | 487 | """Create a templater from a string template 'tmpl'""" |
|
490 | 488 | aliases = ui.configitems('templatealias') |
|
491 | 489 | t = templater.templater(defaults=defaults, resources=resources, |
|
492 | 490 | cache=cache, aliases=aliases) |
|
493 | 491 | t.cache.update((k, templater.unquotestring(v)) |
|
494 | 492 | for k, v in ui.configitems('templates')) |
|
495 | 493 | if tmpl: |
|
496 | 494 | t.cache[''] = tmpl |
|
497 | 495 | return t |
|
498 | 496 | |
|
499 | 497 | def templateresources(ui, repo=None): |
|
500 | 498 | """Create a dict of template resources designed for the default templatekw |
|
501 | 499 | and function""" |
|
502 | 500 | resmap = { |
|
503 | 501 | 'cache': {}, # for templatekw/funcs to store reusable data |
|
504 | 502 | 'repo': repo, |
|
505 | 503 | 'ui': ui, |
|
506 | 504 | } |
|
507 | 505 | |
|
508 | 506 | def getsome(context, mapping, key): |
|
509 | 507 | v = mapping.get(key) |
|
510 | 508 | if v is not None: |
|
511 | 509 | return v |
|
512 | 510 | return resmap.get(key) |
|
513 | 511 | |
|
514 | 512 | def getctx(context, mapping, key): |
|
515 | 513 | ctx = mapping.get('ctx') |
|
516 | 514 | if ctx is not None: |
|
517 | 515 | return ctx |
|
518 | 516 | fctx = mapping.get('fctx') |
|
519 | 517 | if fctx is not None: |
|
520 | 518 | return fctx.changectx() |
|
521 | 519 | |
|
522 | 520 | def getrepo(context, mapping, key): |
|
523 | 521 | ctx = getctx(context, mapping, 'ctx') |
|
524 | 522 | if ctx is not None: |
|
525 | 523 | return ctx.repo() |
|
526 | 524 | return getsome(context, mapping, key) |
|
527 | 525 | |
|
528 | 526 | return { |
|
529 | 527 | 'cache': getsome, |
|
530 | 528 | 'ctx': getctx, |
|
531 | 529 | 'fctx': getsome, |
|
532 | 530 | 'repo': getrepo, |
|
533 | 531 | 'revcache': getsome, # per-ctx cache; set later |
|
534 | 532 | 'ui': getsome, |
|
535 | 533 | } |
|
536 | 534 | |
|
537 | 535 | def formatter(ui, out, topic, opts): |
|
538 | 536 | template = opts.get("template", "") |
|
539 | 537 | if template == "json": |
|
540 | 538 | return jsonformatter(ui, out, topic, opts) |
|
541 | 539 | elif template == "pickle": |
|
542 | 540 | return pickleformatter(ui, out, topic, opts) |
|
543 | 541 | elif template == "debug": |
|
544 | 542 | return debugformatter(ui, out, topic, opts) |
|
545 | 543 | elif template != "": |
|
546 | 544 | return templateformatter(ui, out, topic, opts) |
|
547 | 545 | # developer config: ui.formatdebug |
|
548 | 546 | elif ui.configbool('ui', 'formatdebug'): |
|
549 | 547 | return debugformatter(ui, out, topic, opts) |
|
550 | 548 | # deprecated config: ui.formatjson |
|
551 | 549 | elif ui.configbool('ui', 'formatjson'): |
|
552 | 550 | return jsonformatter(ui, out, topic, opts) |
|
553 | 551 | return plainformatter(ui, out, topic, opts) |
|
554 | 552 | |
|
555 | 553 | @contextlib.contextmanager |
|
556 | 554 | def openformatter(ui, filename, topic, opts): |
|
557 | 555 | """Create a formatter that writes outputs to the specified file |
|
558 | 556 | |
|
559 | 557 | Must be invoked using the 'with' statement. |
|
560 | 558 | """ |
|
561 | 559 | with util.posixfile(filename, 'wb') as out: |
|
562 | 560 | with formatter(ui, out, topic, opts) as fm: |
|
563 | 561 | yield fm |
|
564 | 562 | |
|
565 | 563 | @contextlib.contextmanager |
|
566 | 564 | def _neverending(fm): |
|
567 | 565 | yield fm |
|
568 | 566 | |
|
569 | 567 | def maybereopen(fm, filename, opts): |
|
570 | 568 | """Create a formatter backed by file if filename specified, else return |
|
571 | 569 | the given formatter |
|
572 | 570 | |
|
573 | 571 | Must be invoked using the 'with' statement. This will never call fm.end() |
|
574 | 572 | of the given formatter. |
|
575 | 573 | """ |
|
576 | 574 | if filename: |
|
577 | 575 | return openformatter(fm._ui, filename, fm._topic, opts) |
|
578 | 576 | else: |
|
579 | 577 | return _neverending(fm) |
@@ -1,454 +1,453 b'' | |||
|
1 | 1 | # hgweb/hgweb_mod.py - Web interface for a repository. |
|
2 | 2 | # |
|
3 | 3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
4 | 4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
5 | 5 | # |
|
6 | 6 | # This software may be used and distributed according to the terms of the |
|
7 | 7 | # GNU General Public License version 2 or any later version. |
|
8 | 8 | |
|
9 | 9 | from __future__ import absolute_import |
|
10 | 10 | |
|
11 | 11 | import contextlib |
|
12 | 12 | import os |
|
13 | 13 | |
|
14 | 14 | from .common import ( |
|
15 | 15 | ErrorResponse, |
|
16 | 16 | HTTP_BAD_REQUEST, |
|
17 | 17 | cspvalues, |
|
18 | 18 | permhooks, |
|
19 | 19 | statusmessage, |
|
20 | 20 | ) |
|
21 | 21 | |
|
22 | 22 | from .. import ( |
|
23 | 23 | encoding, |
|
24 | 24 | error, |
|
25 | 25 | formatter, |
|
26 | 26 | hg, |
|
27 | 27 | hook, |
|
28 | 28 | profiling, |
|
29 | 29 | pycompat, |
|
30 | 30 | repoview, |
|
31 | 31 | templatefilters, |
|
32 | 32 | templater, |
|
33 | templateutil, | |
|
34 | 33 | ui as uimod, |
|
35 | 34 | util, |
|
36 | 35 | wireprotoserver, |
|
37 | 36 | ) |
|
38 | 37 | |
|
39 | 38 | from . import ( |
|
40 | 39 | request as requestmod, |
|
41 | 40 | webcommands, |
|
42 | 41 | webutil, |
|
43 | 42 | wsgicgi, |
|
44 | 43 | ) |
|
45 | 44 | |
|
46 | 45 | archivespecs = util.sortdict(( |
|
47 | 46 | ('zip', ('application/zip', 'zip', '.zip', None)), |
|
48 | 47 | ('gz', ('application/x-gzip', 'tgz', '.tar.gz', None)), |
|
49 | 48 | ('bz2', ('application/x-bzip2', 'tbz2', '.tar.bz2', None)), |
|
50 | 49 | )) |
|
51 | 50 | |
|
52 | 51 | def getstyle(req, configfn, templatepath): |
|
53 | 52 | styles = ( |
|
54 | 53 | req.qsparams.get('style', None), |
|
55 | 54 | configfn('web', 'style'), |
|
56 | 55 | 'paper', |
|
57 | 56 | ) |
|
58 | 57 | return styles, templater.stylemap(styles, templatepath) |
|
59 | 58 | |
|
60 | 59 | def makebreadcrumb(url, prefix=''): |
|
61 | 60 | '''Return a 'URL breadcrumb' list |
|
62 | 61 | |
|
63 | 62 | A 'URL breadcrumb' is a list of URL-name pairs, |
|
64 | 63 | corresponding to each of the path items on a URL. |
|
65 | 64 | This can be used to create path navigation entries. |
|
66 | 65 | ''' |
|
67 | 66 | if url.endswith('/'): |
|
68 | 67 | url = url[:-1] |
|
69 | 68 | if prefix: |
|
70 | 69 | url = '/' + prefix + url |
|
71 | 70 | relpath = url |
|
72 | 71 | if relpath.startswith('/'): |
|
73 | 72 | relpath = relpath[1:] |
|
74 | 73 | |
|
75 | 74 | breadcrumb = [] |
|
76 | 75 | urlel = url |
|
77 | 76 | pathitems = [''] + relpath.split('/') |
|
78 | 77 | for pathel in reversed(pathitems): |
|
79 | 78 | if not pathel or not urlel: |
|
80 | 79 | break |
|
81 | 80 | breadcrumb.append({'url': urlel, 'name': pathel}) |
|
82 | 81 | urlel = os.path.dirname(urlel) |
|
83 | 82 | return reversed(breadcrumb) |
|
84 | 83 | |
|
85 | 84 | class requestcontext(object): |
|
86 | 85 | """Holds state/context for an individual request. |
|
87 | 86 | |
|
88 | 87 | Servers can be multi-threaded. Holding state on the WSGI application |
|
89 | 88 | is prone to race conditions. Instances of this class exist to hold |
|
90 | 89 | mutable and race-free state for requests. |
|
91 | 90 | """ |
|
92 | 91 | def __init__(self, app, repo, req, res): |
|
93 | 92 | self.repo = repo |
|
94 | 93 | self.reponame = app.reponame |
|
95 | 94 | self.req = req |
|
96 | 95 | self.res = res |
|
97 | 96 | |
|
98 | 97 | self.archivespecs = archivespecs |
|
99 | 98 | |
|
100 | 99 | self.maxchanges = self.configint('web', 'maxchanges') |
|
101 | 100 | self.stripecount = self.configint('web', 'stripes') |
|
102 | 101 | self.maxshortchanges = self.configint('web', 'maxshortchanges') |
|
103 | 102 | self.maxfiles = self.configint('web', 'maxfiles') |
|
104 | 103 | self.allowpull = self.configbool('web', 'allow-pull') |
|
105 | 104 | |
|
106 | 105 | # we use untrusted=False to prevent a repo owner from using |
|
107 | 106 | # web.templates in .hg/hgrc to get access to any file readable |
|
108 | 107 | # by the user running the CGI script |
|
109 | 108 | self.templatepath = self.config('web', 'templates', untrusted=False) |
|
110 | 109 | |
|
111 | 110 | # This object is more expensive to build than simple config values. |
|
112 | 111 | # It is shared across requests. The app will replace the object |
|
113 | 112 | # if it is updated. Since this is a reference and nothing should |
|
114 | 113 | # modify the underlying object, it should be constant for the lifetime |
|
115 | 114 | # of the request. |
|
116 | 115 | self.websubtable = app.websubtable |
|
117 | 116 | |
|
118 | 117 | self.csp, self.nonce = cspvalues(self.repo.ui) |
|
119 | 118 | |
|
120 | 119 | # Trust the settings from the .hg/hgrc files by default. |
|
121 | 120 | def config(self, section, name, default=uimod._unset, untrusted=True): |
|
122 | 121 | return self.repo.ui.config(section, name, default, |
|
123 | 122 | untrusted=untrusted) |
|
124 | 123 | |
|
125 | 124 | def configbool(self, section, name, default=uimod._unset, untrusted=True): |
|
126 | 125 | return self.repo.ui.configbool(section, name, default, |
|
127 | 126 | untrusted=untrusted) |
|
128 | 127 | |
|
129 | 128 | def configint(self, section, name, default=uimod._unset, untrusted=True): |
|
130 | 129 | return self.repo.ui.configint(section, name, default, |
|
131 | 130 | untrusted=untrusted) |
|
132 | 131 | |
|
133 | 132 | def configlist(self, section, name, default=uimod._unset, untrusted=True): |
|
134 | 133 | return self.repo.ui.configlist(section, name, default, |
|
135 | 134 | untrusted=untrusted) |
|
136 | 135 | |
|
137 | 136 | def archivelist(self, nodeid): |
|
138 | 137 | allowed = self.configlist('web', 'allow_archive') |
|
139 | 138 | for typ, spec in self.archivespecs.iteritems(): |
|
140 | 139 | if typ in allowed or self.configbool('web', 'allow%s' % typ): |
|
141 | 140 | yield {'type': typ, 'extension': spec[2], 'node': nodeid} |
|
142 | 141 | |
|
143 | 142 | def templater(self, req): |
|
144 | 143 | # determine scheme, port and server name |
|
145 | 144 | # this is needed to create absolute urls |
|
146 | 145 | logourl = self.config('web', 'logourl') |
|
147 | 146 | logoimg = self.config('web', 'logoimg') |
|
148 | 147 | staticurl = (self.config('web', 'staticurl') |
|
149 | 148 | or req.apppath + '/static/') |
|
150 | 149 | if not staticurl.endswith('/'): |
|
151 | 150 | staticurl += '/' |
|
152 | 151 | |
|
153 | 152 | # some functions for the templater |
|
154 | 153 | |
|
155 | 154 | def motd(**map): |
|
156 | 155 | yield self.config('web', 'motd') |
|
157 | 156 | |
|
158 | 157 | # figure out which style to use |
|
159 | 158 | |
|
160 | 159 | vars = {} |
|
161 | 160 | styles, (style, mapfile) = getstyle(req, self.config, |
|
162 | 161 | self.templatepath) |
|
163 | 162 | if style == styles[0]: |
|
164 | 163 | vars['style'] = style |
|
165 | 164 | |
|
166 | 165 | sessionvars = webutil.sessionvars(vars, '?') |
|
167 | 166 | |
|
168 | 167 | if not self.reponame: |
|
169 | 168 | self.reponame = (self.config('web', 'name', '') |
|
170 | 169 | or req.reponame |
|
171 | 170 | or req.apppath |
|
172 | 171 | or self.repo.root) |
|
173 | 172 | |
|
174 | 173 | def websubfilter(text): |
|
175 | 174 | return templatefilters.websub(text, self.websubtable) |
|
176 | 175 | |
|
177 | 176 | # create the templater |
|
178 | 177 | # TODO: export all keywords: defaults = templatekw.keywords.copy() |
|
179 | 178 | defaults = { |
|
180 | 179 | 'url': req.apppath + '/', |
|
181 | 180 | 'logourl': logourl, |
|
182 | 181 | 'logoimg': logoimg, |
|
183 | 182 | 'staticurl': staticurl, |
|
184 | 183 | 'urlbase': req.advertisedbaseurl, |
|
185 | 184 | 'repo': self.reponame, |
|
186 | 185 | 'encoding': encoding.encoding, |
|
187 | 186 | 'motd': motd, |
|
188 | 187 | 'sessionvars': sessionvars, |
|
189 | 188 | 'pathdef': makebreadcrumb(req.apppath), |
|
190 | 189 | 'style': style, |
|
191 | 190 | 'nonce': self.nonce, |
|
192 | 191 | } |
|
193 | 192 | tres = formatter.templateresources(self.repo.ui, self.repo) |
|
194 | 193 | tmpl = templater.templater.frommapfile(mapfile, |
|
195 | 194 | filters={'websub': websubfilter}, |
|
196 | 195 | defaults=defaults, |
|
197 | 196 | resources=tres) |
|
198 | 197 | return tmpl |
|
199 | 198 | |
|
200 | 199 | def sendtemplate(self, name, **kwargs): |
|
201 | 200 | """Helper function to send a response generated from a template.""" |
|
202 | 201 | self.res.setbodygen(self.tmpl(name, **kwargs)) |
|
203 | 202 | return self.res.sendresponse() |
|
204 | 203 | |
|
205 | 204 | class hgweb(object): |
|
206 | 205 | """HTTP server for individual repositories. |
|
207 | 206 | |
|
208 | 207 | Instances of this class serve HTTP responses for a particular |
|
209 | 208 | repository. |
|
210 | 209 | |
|
211 | 210 | Instances are typically used as WSGI applications. |
|
212 | 211 | |
|
213 | 212 | Some servers are multi-threaded. On these servers, there may |
|
214 | 213 | be multiple active threads inside __call__. |
|
215 | 214 | """ |
|
216 | 215 | def __init__(self, repo, name=None, baseui=None): |
|
217 | 216 | if isinstance(repo, str): |
|
218 | 217 | if baseui: |
|
219 | 218 | u = baseui.copy() |
|
220 | 219 | else: |
|
221 | 220 | u = uimod.ui.load() |
|
222 | 221 | r = hg.repository(u, repo) |
|
223 | 222 | else: |
|
224 | 223 | # we trust caller to give us a private copy |
|
225 | 224 | r = repo |
|
226 | 225 | |
|
227 | 226 | r.ui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') |
|
228 | 227 | r.baseui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') |
|
229 | 228 | r.ui.setconfig('ui', 'nontty', 'true', 'hgweb') |
|
230 | 229 | r.baseui.setconfig('ui', 'nontty', 'true', 'hgweb') |
|
231 | 230 | # resolve file patterns relative to repo root |
|
232 | 231 | r.ui.setconfig('ui', 'forcecwd', r.root, 'hgweb') |
|
233 | 232 | r.baseui.setconfig('ui', 'forcecwd', r.root, 'hgweb') |
|
234 | 233 | # displaying bundling progress bar while serving feel wrong and may |
|
235 | 234 | # break some wsgi implementation. |
|
236 | 235 | r.ui.setconfig('progress', 'disable', 'true', 'hgweb') |
|
237 | 236 | r.baseui.setconfig('progress', 'disable', 'true', 'hgweb') |
|
238 | 237 | self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))] |
|
239 | 238 | self._lastrepo = self._repos[0] |
|
240 | 239 | hook.redirect(True) |
|
241 | 240 | self.reponame = name |
|
242 | 241 | |
|
243 | 242 | def _webifyrepo(self, repo): |
|
244 | 243 | repo = getwebview(repo) |
|
245 | 244 | self.websubtable = webutil.getwebsubs(repo) |
|
246 | 245 | return repo |
|
247 | 246 | |
|
248 | 247 | @contextlib.contextmanager |
|
249 | 248 | def _obtainrepo(self): |
|
250 | 249 | """Obtain a repo unique to the caller. |
|
251 | 250 | |
|
252 | 251 | Internally we maintain a stack of cachedlocalrepo instances |
|
253 | 252 | to be handed out. If one is available, we pop it and return it, |
|
254 | 253 | ensuring it is up to date in the process. If one is not available, |
|
255 | 254 | we clone the most recently used repo instance and return it. |
|
256 | 255 | |
|
257 | 256 | It is currently possible for the stack to grow without bounds |
|
258 | 257 | if the server allows infinite threads. However, servers should |
|
259 | 258 | have a thread limit, thus establishing our limit. |
|
260 | 259 | """ |
|
261 | 260 | if self._repos: |
|
262 | 261 | cached = self._repos.pop() |
|
263 | 262 | r, created = cached.fetch() |
|
264 | 263 | else: |
|
265 | 264 | cached = self._lastrepo.copy() |
|
266 | 265 | r, created = cached.fetch() |
|
267 | 266 | if created: |
|
268 | 267 | r = self._webifyrepo(r) |
|
269 | 268 | |
|
270 | 269 | self._lastrepo = cached |
|
271 | 270 | self.mtime = cached.mtime |
|
272 | 271 | try: |
|
273 | 272 | yield r |
|
274 | 273 | finally: |
|
275 | 274 | self._repos.append(cached) |
|
276 | 275 | |
|
277 | 276 | def run(self): |
|
278 | 277 | """Start a server from CGI environment. |
|
279 | 278 | |
|
280 | 279 | Modern servers should be using WSGI and should avoid this |
|
281 | 280 | method, if possible. |
|
282 | 281 | """ |
|
283 | 282 | if not encoding.environ.get('GATEWAY_INTERFACE', |
|
284 | 283 | '').startswith("CGI/1."): |
|
285 | 284 | raise RuntimeError("This function is only intended to be " |
|
286 | 285 | "called while running as a CGI script.") |
|
287 | 286 | wsgicgi.launch(self) |
|
288 | 287 | |
|
289 | 288 | def __call__(self, env, respond): |
|
290 | 289 | """Run the WSGI application. |
|
291 | 290 | |
|
292 | 291 | This may be called by multiple threads. |
|
293 | 292 | """ |
|
294 | 293 | req = requestmod.parserequestfromenv(env) |
|
295 | 294 | res = requestmod.wsgiresponse(req, respond) |
|
296 | 295 | |
|
297 | 296 | return self.run_wsgi(req, res) |
|
298 | 297 | |
|
299 | 298 | def run_wsgi(self, req, res): |
|
300 | 299 | """Internal method to run the WSGI application. |
|
301 | 300 | |
|
302 | 301 | This is typically only called by Mercurial. External consumers |
|
303 | 302 | should be using instances of this class as the WSGI application. |
|
304 | 303 | """ |
|
305 | 304 | with self._obtainrepo() as repo: |
|
306 | 305 | profile = repo.ui.configbool('profiling', 'enabled') |
|
307 | 306 | with profiling.profile(repo.ui, enabled=profile): |
|
308 | 307 | for r in self._runwsgi(req, res, repo): |
|
309 | 308 | yield r |
|
310 | 309 | |
|
311 | 310 | def _runwsgi(self, req, res, repo): |
|
312 | 311 | rctx = requestcontext(self, repo, req, res) |
|
313 | 312 | |
|
314 | 313 | # This state is global across all threads. |
|
315 | 314 | encoding.encoding = rctx.config('web', 'encoding') |
|
316 | 315 | rctx.repo.ui.environ = req.rawenv |
|
317 | 316 | |
|
318 | 317 | if rctx.csp: |
|
319 | 318 | # hgwebdir may have added CSP header. Since we generate our own, |
|
320 | 319 | # replace it. |
|
321 | 320 | res.headers['Content-Security-Policy'] = rctx.csp |
|
322 | 321 | |
|
323 | 322 | handled = wireprotoserver.handlewsgirequest( |
|
324 | 323 | rctx, req, res, self.check_perm) |
|
325 | 324 | if handled: |
|
326 | 325 | return res.sendresponse() |
|
327 | 326 | |
|
328 | 327 | # Old implementations of hgweb supported dispatching the request via |
|
329 | 328 | # the initial query string parameter instead of using PATH_INFO. |
|
330 | 329 | # If PATH_INFO is present (signaled by ``req.dispatchpath`` having |
|
331 | 330 | # a value), we use it. Otherwise fall back to the query string. |
|
332 | 331 | if req.dispatchpath is not None: |
|
333 | 332 | query = req.dispatchpath |
|
334 | 333 | else: |
|
335 | 334 | query = req.querystring.partition('&')[0].partition(';')[0] |
|
336 | 335 | |
|
337 | 336 | # translate user-visible url structure to internal structure |
|
338 | 337 | |
|
339 | 338 | args = query.split('/', 2) |
|
340 | 339 | if 'cmd' not in req.qsparams and args and args[0]: |
|
341 | 340 | cmd = args.pop(0) |
|
342 | 341 | style = cmd.rfind('-') |
|
343 | 342 | if style != -1: |
|
344 | 343 | req.qsparams['style'] = cmd[:style] |
|
345 | 344 | cmd = cmd[style + 1:] |
|
346 | 345 | |
|
347 | 346 | # avoid accepting e.g. style parameter as command |
|
348 | 347 | if util.safehasattr(webcommands, cmd): |
|
349 | 348 | req.qsparams['cmd'] = cmd |
|
350 | 349 | |
|
351 | 350 | if cmd == 'static': |
|
352 | 351 | req.qsparams['file'] = '/'.join(args) |
|
353 | 352 | else: |
|
354 | 353 | if args and args[0]: |
|
355 | 354 | node = args.pop(0).replace('%2F', '/') |
|
356 | 355 | req.qsparams['node'] = node |
|
357 | 356 | if args: |
|
358 | 357 | if 'file' in req.qsparams: |
|
359 | 358 | del req.qsparams['file'] |
|
360 | 359 | for a in args: |
|
361 | 360 | req.qsparams.add('file', a) |
|
362 | 361 | |
|
363 | 362 | ua = req.headers.get('User-Agent', '') |
|
364 | 363 | if cmd == 'rev' and 'mercurial' in ua: |
|
365 | 364 | req.qsparams['style'] = 'raw' |
|
366 | 365 | |
|
367 | 366 | if cmd == 'archive': |
|
368 | 367 | fn = req.qsparams['node'] |
|
369 | 368 | for type_, spec in rctx.archivespecs.iteritems(): |
|
370 | 369 | ext = spec[2] |
|
371 | 370 | if fn.endswith(ext): |
|
372 | 371 | req.qsparams['node'] = fn[:-len(ext)] |
|
373 | 372 | req.qsparams['type'] = type_ |
|
374 | 373 | else: |
|
375 | 374 | cmd = req.qsparams.get('cmd', '') |
|
376 | 375 | |
|
377 | 376 | # process the web interface request |
|
378 | 377 | |
|
379 | 378 | try: |
|
380 | 379 | rctx.tmpl = rctx.templater(req) |
|
381 |
ctype = rctx.tmpl('mimetype', |
|
|
382 | ctype = templateutil.stringify(ctype) | |
|
380 | ctype = rctx.tmpl.render('mimetype', | |
|
381 | {'encoding': encoding.encoding}) | |
|
383 | 382 | |
|
384 | 383 | # check read permissions non-static content |
|
385 | 384 | if cmd != 'static': |
|
386 | 385 | self.check_perm(rctx, req, None) |
|
387 | 386 | |
|
388 | 387 | if cmd == '': |
|
389 | 388 | req.qsparams['cmd'] = rctx.tmpl.cache['default'] |
|
390 | 389 | cmd = req.qsparams['cmd'] |
|
391 | 390 | |
|
392 | 391 | # Don't enable caching if using a CSP nonce because then it wouldn't |
|
393 | 392 | # be a nonce. |
|
394 | 393 | if rctx.configbool('web', 'cache') and not rctx.nonce: |
|
395 | 394 | tag = 'W/"%d"' % self.mtime |
|
396 | 395 | if req.headers.get('If-None-Match') == tag: |
|
397 | 396 | res.status = '304 Not Modified' |
|
398 | 397 | # Response body not allowed on 304. |
|
399 | 398 | res.setbodybytes('') |
|
400 | 399 | return res.sendresponse() |
|
401 | 400 | |
|
402 | 401 | res.headers['ETag'] = tag |
|
403 | 402 | |
|
404 | 403 | if cmd not in webcommands.__all__: |
|
405 | 404 | msg = 'no such method: %s' % cmd |
|
406 | 405 | raise ErrorResponse(HTTP_BAD_REQUEST, msg) |
|
407 | 406 | else: |
|
408 | 407 | # Set some globals appropriate for web handlers. Commands can |
|
409 | 408 | # override easily enough. |
|
410 | 409 | res.status = '200 Script output follows' |
|
411 | 410 | res.headers['Content-Type'] = ctype |
|
412 | 411 | return getattr(webcommands, cmd)(rctx) |
|
413 | 412 | |
|
414 | 413 | except (error.LookupError, error.RepoLookupError) as err: |
|
415 | 414 | msg = pycompat.bytestr(err) |
|
416 | 415 | if (util.safehasattr(err, 'name') and |
|
417 | 416 | not isinstance(err, error.ManifestLookupError)): |
|
418 | 417 | msg = 'revision not found: %s' % err.name |
|
419 | 418 | |
|
420 | 419 | res.status = '404 Not Found' |
|
421 | 420 | res.headers['Content-Type'] = ctype |
|
422 | 421 | return rctx.sendtemplate('error', error=msg) |
|
423 | 422 | except (error.RepoError, error.RevlogError) as e: |
|
424 | 423 | res.status = '500 Internal Server Error' |
|
425 | 424 | res.headers['Content-Type'] = ctype |
|
426 | 425 | return rctx.sendtemplate('error', error=pycompat.bytestr(e)) |
|
427 | 426 | except ErrorResponse as e: |
|
428 | 427 | res.status = statusmessage(e.code, pycompat.bytestr(e)) |
|
429 | 428 | res.headers['Content-Type'] = ctype |
|
430 | 429 | return rctx.sendtemplate('error', error=pycompat.bytestr(e)) |
|
431 | 430 | |
|
432 | 431 | def check_perm(self, rctx, req, op): |
|
433 | 432 | for permhook in permhooks: |
|
434 | 433 | permhook(rctx, req, op) |
|
435 | 434 | |
|
436 | 435 | def getwebview(repo): |
|
437 | 436 | """The 'web.view' config controls changeset filter to hgweb. Possible |
|
438 | 437 | values are ``served``, ``visible`` and ``all``. Default is ``served``. |
|
439 | 438 | The ``served`` filter only shows changesets that can be pulled from the |
|
440 | 439 | hgweb instance. The``visible`` filter includes secret changesets but |
|
441 | 440 | still excludes "hidden" one. |
|
442 | 441 | |
|
443 | 442 | See the repoview module for details. |
|
444 | 443 | |
|
445 | 444 | The option has been around undocumented since Mercurial 2.5, but no |
|
446 | 445 | user ever asked about it. So we better keep it undocumented for now.""" |
|
447 | 446 | # experimental config: web.view |
|
448 | 447 | viewconfig = repo.ui.config('web', 'view', untrusted=True) |
|
449 | 448 | if viewconfig == 'all': |
|
450 | 449 | return repo.unfiltered() |
|
451 | 450 | elif viewconfig in repoview.filtertable: |
|
452 | 451 | return repo.filtered(viewconfig) |
|
453 | 452 | else: |
|
454 | 453 | return repo.filtered('served') |
@@ -1,538 +1,536 b'' | |||
|
1 | 1 | # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories. |
|
2 | 2 | # |
|
3 | 3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
4 | 4 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
5 | 5 | # |
|
6 | 6 | # This software may be used and distributed according to the terms of the |
|
7 | 7 | # GNU General Public License version 2 or any later version. |
|
8 | 8 | |
|
9 | 9 | from __future__ import absolute_import |
|
10 | 10 | |
|
11 | 11 | import gc |
|
12 | 12 | import os |
|
13 | 13 | import time |
|
14 | 14 | |
|
15 | 15 | from ..i18n import _ |
|
16 | 16 | |
|
17 | 17 | from .common import ( |
|
18 | 18 | ErrorResponse, |
|
19 | 19 | HTTP_SERVER_ERROR, |
|
20 | 20 | cspvalues, |
|
21 | 21 | get_contact, |
|
22 | 22 | get_mtime, |
|
23 | 23 | ismember, |
|
24 | 24 | paritygen, |
|
25 | 25 | staticfile, |
|
26 | 26 | statusmessage, |
|
27 | 27 | ) |
|
28 | 28 | |
|
29 | 29 | from .. import ( |
|
30 | 30 | configitems, |
|
31 | 31 | encoding, |
|
32 | 32 | error, |
|
33 | 33 | hg, |
|
34 | 34 | profiling, |
|
35 | 35 | pycompat, |
|
36 | 36 | scmutil, |
|
37 | 37 | templater, |
|
38 | templateutil, | |
|
39 | 38 | ui as uimod, |
|
40 | 39 | util, |
|
41 | 40 | ) |
|
42 | 41 | |
|
43 | 42 | from . import ( |
|
44 | 43 | hgweb_mod, |
|
45 | 44 | request as requestmod, |
|
46 | 45 | webutil, |
|
47 | 46 | wsgicgi, |
|
48 | 47 | ) |
|
49 | 48 | from ..utils import dateutil |
|
50 | 49 | |
|
51 | 50 | def cleannames(items): |
|
52 | 51 | return [(util.pconvert(name).strip('/'), path) for name, path in items] |
|
53 | 52 | |
|
54 | 53 | def findrepos(paths): |
|
55 | 54 | repos = [] |
|
56 | 55 | for prefix, root in cleannames(paths): |
|
57 | 56 | roothead, roottail = os.path.split(root) |
|
58 | 57 | # "foo = /bar/*" or "foo = /bar/**" lets every repo /bar/N in or below |
|
59 | 58 | # /bar/ be served as as foo/N . |
|
60 | 59 | # '*' will not search inside dirs with .hg (except .hg/patches), |
|
61 | 60 | # '**' will search inside dirs with .hg (and thus also find subrepos). |
|
62 | 61 | try: |
|
63 | 62 | recurse = {'*': False, '**': True}[roottail] |
|
64 | 63 | except KeyError: |
|
65 | 64 | repos.append((prefix, root)) |
|
66 | 65 | continue |
|
67 | 66 | roothead = os.path.normpath(os.path.abspath(roothead)) |
|
68 | 67 | paths = scmutil.walkrepos(roothead, followsym=True, recurse=recurse) |
|
69 | 68 | repos.extend(urlrepos(prefix, roothead, paths)) |
|
70 | 69 | return repos |
|
71 | 70 | |
|
72 | 71 | def urlrepos(prefix, roothead, paths): |
|
73 | 72 | """yield url paths and filesystem paths from a list of repo paths |
|
74 | 73 | |
|
75 | 74 | >>> conv = lambda seq: [(v, util.pconvert(p)) for v,p in seq] |
|
76 | 75 | >>> conv(urlrepos(b'hg', b'/opt', [b'/opt/r', b'/opt/r/r', b'/opt'])) |
|
77 | 76 | [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')] |
|
78 | 77 | >>> conv(urlrepos(b'', b'/opt', [b'/opt/r', b'/opt/r/r', b'/opt'])) |
|
79 | 78 | [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')] |
|
80 | 79 | """ |
|
81 | 80 | for path in paths: |
|
82 | 81 | path = os.path.normpath(path) |
|
83 | 82 | yield (prefix + '/' + |
|
84 | 83 | util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path |
|
85 | 84 | |
|
86 | 85 | def readallowed(ui, req): |
|
87 | 86 | """Check allow_read and deny_read config options of a repo's ui object |
|
88 | 87 | to determine user permissions. By default, with neither option set (or |
|
89 | 88 | both empty), allow all users to read the repo. There are two ways a |
|
90 | 89 | user can be denied read access: (1) deny_read is not empty, and the |
|
91 | 90 | user is unauthenticated or deny_read contains user (or *), and (2) |
|
92 | 91 | allow_read is not empty and the user is not in allow_read. Return True |
|
93 | 92 | if user is allowed to read the repo, else return False.""" |
|
94 | 93 | |
|
95 | 94 | user = req.remoteuser |
|
96 | 95 | |
|
97 | 96 | deny_read = ui.configlist('web', 'deny_read', untrusted=True) |
|
98 | 97 | if deny_read and (not user or ismember(ui, user, deny_read)): |
|
99 | 98 | return False |
|
100 | 99 | |
|
101 | 100 | allow_read = ui.configlist('web', 'allow_read', untrusted=True) |
|
102 | 101 | # by default, allow reading if no allow_read option has been set |
|
103 | 102 | if not allow_read or ismember(ui, user, allow_read): |
|
104 | 103 | return True |
|
105 | 104 | |
|
106 | 105 | return False |
|
107 | 106 | |
|
108 | 107 | def archivelist(ui, nodeid, url): |
|
109 | 108 | allowed = ui.configlist('web', 'allow_archive', untrusted=True) |
|
110 | 109 | archives = [] |
|
111 | 110 | |
|
112 | 111 | for typ, spec in hgweb_mod.archivespecs.iteritems(): |
|
113 | 112 | if typ in allowed or ui.configbool('web', 'allow' + typ, |
|
114 | 113 | untrusted=True): |
|
115 | 114 | archives.append({ |
|
116 | 115 | 'type': typ, |
|
117 | 116 | 'extension': spec[2], |
|
118 | 117 | 'node': nodeid, |
|
119 | 118 | 'url': url, |
|
120 | 119 | }) |
|
121 | 120 | |
|
122 | 121 | return archives |
|
123 | 122 | |
|
124 | 123 | def rawindexentries(ui, repos, req, subdir=''): |
|
125 | 124 | descend = ui.configbool('web', 'descend') |
|
126 | 125 | collapse = ui.configbool('web', 'collapse') |
|
127 | 126 | seenrepos = set() |
|
128 | 127 | seendirs = set() |
|
129 | 128 | for name, path in repos: |
|
130 | 129 | |
|
131 | 130 | if not name.startswith(subdir): |
|
132 | 131 | continue |
|
133 | 132 | name = name[len(subdir):] |
|
134 | 133 | directory = False |
|
135 | 134 | |
|
136 | 135 | if '/' in name: |
|
137 | 136 | if not descend: |
|
138 | 137 | continue |
|
139 | 138 | |
|
140 | 139 | nameparts = name.split('/') |
|
141 | 140 | rootname = nameparts[0] |
|
142 | 141 | |
|
143 | 142 | if not collapse: |
|
144 | 143 | pass |
|
145 | 144 | elif rootname in seendirs: |
|
146 | 145 | continue |
|
147 | 146 | elif rootname in seenrepos: |
|
148 | 147 | pass |
|
149 | 148 | else: |
|
150 | 149 | directory = True |
|
151 | 150 | name = rootname |
|
152 | 151 | |
|
153 | 152 | # redefine the path to refer to the directory |
|
154 | 153 | discarded = '/'.join(nameparts[1:]) |
|
155 | 154 | |
|
156 | 155 | # remove name parts plus accompanying slash |
|
157 | 156 | path = path[:-len(discarded) - 1] |
|
158 | 157 | |
|
159 | 158 | try: |
|
160 | 159 | r = hg.repository(ui, path) |
|
161 | 160 | directory = False |
|
162 | 161 | except (IOError, error.RepoError): |
|
163 | 162 | pass |
|
164 | 163 | |
|
165 | 164 | parts = [ |
|
166 | 165 | req.apppath.strip('/'), |
|
167 | 166 | subdir.strip('/'), |
|
168 | 167 | name.strip('/'), |
|
169 | 168 | ] |
|
170 | 169 | url = '/' + '/'.join(p for p in parts if p) + '/' |
|
171 | 170 | |
|
172 | 171 | # show either a directory entry or a repository |
|
173 | 172 | if directory: |
|
174 | 173 | # get the directory's time information |
|
175 | 174 | try: |
|
176 | 175 | d = (get_mtime(path), dateutil.makedate()[1]) |
|
177 | 176 | except OSError: |
|
178 | 177 | continue |
|
179 | 178 | |
|
180 | 179 | # add '/' to the name to make it obvious that |
|
181 | 180 | # the entry is a directory, not a regular repository |
|
182 | 181 | row = {'contact': "", |
|
183 | 182 | 'contact_sort': "", |
|
184 | 183 | 'name': name + '/', |
|
185 | 184 | 'name_sort': name, |
|
186 | 185 | 'url': url, |
|
187 | 186 | 'description': "", |
|
188 | 187 | 'description_sort': "", |
|
189 | 188 | 'lastchange': d, |
|
190 | 189 | 'lastchange_sort': d[1] - d[0], |
|
191 | 190 | 'archives': [], |
|
192 | 191 | 'isdirectory': True, |
|
193 | 192 | 'labels': [], |
|
194 | 193 | } |
|
195 | 194 | |
|
196 | 195 | seendirs.add(name) |
|
197 | 196 | yield row |
|
198 | 197 | continue |
|
199 | 198 | |
|
200 | 199 | u = ui.copy() |
|
201 | 200 | try: |
|
202 | 201 | u.readconfig(os.path.join(path, '.hg', 'hgrc')) |
|
203 | 202 | except Exception as e: |
|
204 | 203 | u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e)) |
|
205 | 204 | continue |
|
206 | 205 | |
|
207 | 206 | def get(section, name, default=uimod._unset): |
|
208 | 207 | return u.config(section, name, default, untrusted=True) |
|
209 | 208 | |
|
210 | 209 | if u.configbool("web", "hidden", untrusted=True): |
|
211 | 210 | continue |
|
212 | 211 | |
|
213 | 212 | if not readallowed(u, req): |
|
214 | 213 | continue |
|
215 | 214 | |
|
216 | 215 | # update time with local timezone |
|
217 | 216 | try: |
|
218 | 217 | r = hg.repository(ui, path) |
|
219 | 218 | except IOError: |
|
220 | 219 | u.warn(_('error accessing repository at %s\n') % path) |
|
221 | 220 | continue |
|
222 | 221 | except error.RepoError: |
|
223 | 222 | u.warn(_('error accessing repository at %s\n') % path) |
|
224 | 223 | continue |
|
225 | 224 | try: |
|
226 | 225 | d = (get_mtime(r.spath), dateutil.makedate()[1]) |
|
227 | 226 | except OSError: |
|
228 | 227 | continue |
|
229 | 228 | |
|
230 | 229 | contact = get_contact(get) |
|
231 | 230 | description = get("web", "description") |
|
232 | 231 | seenrepos.add(name) |
|
233 | 232 | name = get("web", "name", name) |
|
234 | 233 | row = {'contact': contact or "unknown", |
|
235 | 234 | 'contact_sort': contact.upper() or "unknown", |
|
236 | 235 | 'name': name, |
|
237 | 236 | 'name_sort': name, |
|
238 | 237 | 'url': url, |
|
239 | 238 | 'description': description or "unknown", |
|
240 | 239 | 'description_sort': description.upper() or "unknown", |
|
241 | 240 | 'lastchange': d, |
|
242 | 241 | 'lastchange_sort': d[1] - d[0], |
|
243 | 242 | 'archives': archivelist(u, "tip", url), |
|
244 | 243 | 'isdirectory': None, |
|
245 | 244 | 'labels': u.configlist('web', 'labels', untrusted=True), |
|
246 | 245 | } |
|
247 | 246 | |
|
248 | 247 | yield row |
|
249 | 248 | |
|
250 | 249 | def indexentries(ui, repos, req, stripecount, sortcolumn='', |
|
251 | 250 | descending=False, subdir=''): |
|
252 | 251 | |
|
253 | 252 | rows = rawindexentries(ui, repos, req, subdir=subdir) |
|
254 | 253 | |
|
255 | 254 | sortdefault = None, False |
|
256 | 255 | |
|
257 | 256 | if sortcolumn and sortdefault != (sortcolumn, descending): |
|
258 | 257 | sortkey = '%s_sort' % sortcolumn |
|
259 | 258 | rows = sorted(rows, key=lambda x: x[sortkey], |
|
260 | 259 | reverse=descending) |
|
261 | 260 | |
|
262 | 261 | for row, parity in zip(rows, paritygen(stripecount)): |
|
263 | 262 | row['parity'] = parity |
|
264 | 263 | yield row |
|
265 | 264 | |
|
266 | 265 | class hgwebdir(object): |
|
267 | 266 | """HTTP server for multiple repositories. |
|
268 | 267 | |
|
269 | 268 | Given a configuration, different repositories will be served depending |
|
270 | 269 | on the request path. |
|
271 | 270 | |
|
272 | 271 | Instances are typically used as WSGI applications. |
|
273 | 272 | """ |
|
274 | 273 | def __init__(self, conf, baseui=None): |
|
275 | 274 | self.conf = conf |
|
276 | 275 | self.baseui = baseui |
|
277 | 276 | self.ui = None |
|
278 | 277 | self.lastrefresh = 0 |
|
279 | 278 | self.motd = None |
|
280 | 279 | self.refresh() |
|
281 | 280 | |
|
282 | 281 | def refresh(self): |
|
283 | 282 | if self.ui: |
|
284 | 283 | refreshinterval = self.ui.configint('web', 'refreshinterval') |
|
285 | 284 | else: |
|
286 | 285 | item = configitems.coreitems['web']['refreshinterval'] |
|
287 | 286 | refreshinterval = item.default |
|
288 | 287 | |
|
289 | 288 | # refreshinterval <= 0 means to always refresh. |
|
290 | 289 | if (refreshinterval > 0 and |
|
291 | 290 | self.lastrefresh + refreshinterval > time.time()): |
|
292 | 291 | return |
|
293 | 292 | |
|
294 | 293 | if self.baseui: |
|
295 | 294 | u = self.baseui.copy() |
|
296 | 295 | else: |
|
297 | 296 | u = uimod.ui.load() |
|
298 | 297 | u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir') |
|
299 | 298 | u.setconfig('ui', 'nontty', 'true', 'hgwebdir') |
|
300 | 299 | # displaying bundling progress bar while serving feels wrong and may |
|
301 | 300 | # break some wsgi implementations. |
|
302 | 301 | u.setconfig('progress', 'disable', 'true', 'hgweb') |
|
303 | 302 | |
|
304 | 303 | if not isinstance(self.conf, (dict, list, tuple)): |
|
305 | 304 | map = {'paths': 'hgweb-paths'} |
|
306 | 305 | if not os.path.exists(self.conf): |
|
307 | 306 | raise error.Abort(_('config file %s not found!') % self.conf) |
|
308 | 307 | u.readconfig(self.conf, remap=map, trust=True) |
|
309 | 308 | paths = [] |
|
310 | 309 | for name, ignored in u.configitems('hgweb-paths'): |
|
311 | 310 | for path in u.configlist('hgweb-paths', name): |
|
312 | 311 | paths.append((name, path)) |
|
313 | 312 | elif isinstance(self.conf, (list, tuple)): |
|
314 | 313 | paths = self.conf |
|
315 | 314 | elif isinstance(self.conf, dict): |
|
316 | 315 | paths = self.conf.items() |
|
317 | 316 | |
|
318 | 317 | repos = findrepos(paths) |
|
319 | 318 | for prefix, root in u.configitems('collections'): |
|
320 | 319 | prefix = util.pconvert(prefix) |
|
321 | 320 | for path in scmutil.walkrepos(root, followsym=True): |
|
322 | 321 | repo = os.path.normpath(path) |
|
323 | 322 | name = util.pconvert(repo) |
|
324 | 323 | if name.startswith(prefix): |
|
325 | 324 | name = name[len(prefix):] |
|
326 | 325 | repos.append((name.lstrip('/'), repo)) |
|
327 | 326 | |
|
328 | 327 | self.repos = repos |
|
329 | 328 | self.ui = u |
|
330 | 329 | encoding.encoding = self.ui.config('web', 'encoding') |
|
331 | 330 | self.style = self.ui.config('web', 'style') |
|
332 | 331 | self.templatepath = self.ui.config('web', 'templates', untrusted=False) |
|
333 | 332 | self.stripecount = self.ui.config('web', 'stripes') |
|
334 | 333 | if self.stripecount: |
|
335 | 334 | self.stripecount = int(self.stripecount) |
|
336 | 335 | prefix = self.ui.config('web', 'prefix') |
|
337 | 336 | if prefix.startswith('/'): |
|
338 | 337 | prefix = prefix[1:] |
|
339 | 338 | if prefix.endswith('/'): |
|
340 | 339 | prefix = prefix[:-1] |
|
341 | 340 | self.prefix = prefix |
|
342 | 341 | self.lastrefresh = time.time() |
|
343 | 342 | |
|
344 | 343 | def run(self): |
|
345 | 344 | if not encoding.environ.get('GATEWAY_INTERFACE', |
|
346 | 345 | '').startswith("CGI/1."): |
|
347 | 346 | raise RuntimeError("This function is only intended to be " |
|
348 | 347 | "called while running as a CGI script.") |
|
349 | 348 | wsgicgi.launch(self) |
|
350 | 349 | |
|
351 | 350 | def __call__(self, env, respond): |
|
352 | 351 | baseurl = self.ui.config('web', 'baseurl') |
|
353 | 352 | req = requestmod.parserequestfromenv(env, altbaseurl=baseurl) |
|
354 | 353 | res = requestmod.wsgiresponse(req, respond) |
|
355 | 354 | |
|
356 | 355 | return self.run_wsgi(req, res) |
|
357 | 356 | |
|
358 | 357 | def run_wsgi(self, req, res): |
|
359 | 358 | profile = self.ui.configbool('profiling', 'enabled') |
|
360 | 359 | with profiling.profile(self.ui, enabled=profile): |
|
361 | 360 | try: |
|
362 | 361 | for r in self._runwsgi(req, res): |
|
363 | 362 | yield r |
|
364 | 363 | finally: |
|
365 | 364 | # There are known cycles in localrepository that prevent |
|
366 | 365 | # those objects (and tons of held references) from being |
|
367 | 366 | # collected through normal refcounting. We mitigate those |
|
368 | 367 | # leaks by performing an explicit GC on every request. |
|
369 | 368 | # TODO remove this once leaks are fixed. |
|
370 | 369 | # TODO only run this on requests that create localrepository |
|
371 | 370 | # instances instead of every request. |
|
372 | 371 | gc.collect() |
|
373 | 372 | |
|
374 | 373 | def _runwsgi(self, req, res): |
|
375 | 374 | try: |
|
376 | 375 | self.refresh() |
|
377 | 376 | |
|
378 | 377 | csp, nonce = cspvalues(self.ui) |
|
379 | 378 | if csp: |
|
380 | 379 | res.headers['Content-Security-Policy'] = csp |
|
381 | 380 | |
|
382 | 381 | virtual = req.dispatchpath.strip('/') |
|
383 | 382 | tmpl = self.templater(req, nonce) |
|
384 |
ctype = tmpl('mimetype', encoding |
|
|
385 | ctype = templateutil.stringify(ctype) | |
|
383 | ctype = tmpl.render('mimetype', {'encoding': encoding.encoding}) | |
|
386 | 384 | |
|
387 | 385 | # Global defaults. These can be overridden by any handler. |
|
388 | 386 | res.status = '200 Script output follows' |
|
389 | 387 | res.headers['Content-Type'] = ctype |
|
390 | 388 | |
|
391 | 389 | # a static file |
|
392 | 390 | if virtual.startswith('static/') or 'static' in req.qsparams: |
|
393 | 391 | if virtual.startswith('static/'): |
|
394 | 392 | fname = virtual[7:] |
|
395 | 393 | else: |
|
396 | 394 | fname = req.qsparams['static'] |
|
397 | 395 | static = self.ui.config("web", "static", None, |
|
398 | 396 | untrusted=False) |
|
399 | 397 | if not static: |
|
400 | 398 | tp = self.templatepath or templater.templatepaths() |
|
401 | 399 | if isinstance(tp, str): |
|
402 | 400 | tp = [tp] |
|
403 | 401 | static = [os.path.join(p, 'static') for p in tp] |
|
404 | 402 | |
|
405 | 403 | staticfile(static, fname, res) |
|
406 | 404 | return res.sendresponse() |
|
407 | 405 | |
|
408 | 406 | # top-level index |
|
409 | 407 | |
|
410 | 408 | repos = dict(self.repos) |
|
411 | 409 | |
|
412 | 410 | if (not virtual or virtual == 'index') and virtual not in repos: |
|
413 | 411 | return self.makeindex(req, res, tmpl) |
|
414 | 412 | |
|
415 | 413 | # nested indexes and hgwebs |
|
416 | 414 | |
|
417 | 415 | if virtual.endswith('/index') and virtual not in repos: |
|
418 | 416 | subdir = virtual[:-len('index')] |
|
419 | 417 | if any(r.startswith(subdir) for r in repos): |
|
420 | 418 | return self.makeindex(req, res, tmpl, subdir) |
|
421 | 419 | |
|
422 | 420 | def _virtualdirs(): |
|
423 | 421 | # Check the full virtual path, each parent, and the root ('') |
|
424 | 422 | if virtual != '': |
|
425 | 423 | yield virtual |
|
426 | 424 | |
|
427 | 425 | for p in util.finddirs(virtual): |
|
428 | 426 | yield p |
|
429 | 427 | |
|
430 | 428 | yield '' |
|
431 | 429 | |
|
432 | 430 | for virtualrepo in _virtualdirs(): |
|
433 | 431 | real = repos.get(virtualrepo) |
|
434 | 432 | if real: |
|
435 | 433 | # Re-parse the WSGI environment to take into account our |
|
436 | 434 | # repository path component. |
|
437 | 435 | req = requestmod.parserequestfromenv( |
|
438 | 436 | req.rawenv, reponame=virtualrepo, |
|
439 | 437 | altbaseurl=self.ui.config('web', 'baseurl')) |
|
440 | 438 | try: |
|
441 | 439 | # ensure caller gets private copy of ui |
|
442 | 440 | repo = hg.repository(self.ui.copy(), real) |
|
443 | 441 | return hgweb_mod.hgweb(repo).run_wsgi(req, res) |
|
444 | 442 | except IOError as inst: |
|
445 | 443 | msg = encoding.strtolocal(inst.strerror) |
|
446 | 444 | raise ErrorResponse(HTTP_SERVER_ERROR, msg) |
|
447 | 445 | except error.RepoError as inst: |
|
448 | 446 | raise ErrorResponse(HTTP_SERVER_ERROR, bytes(inst)) |
|
449 | 447 | |
|
450 | 448 | # browse subdirectories |
|
451 | 449 | subdir = virtual + '/' |
|
452 | 450 | if [r for r in repos if r.startswith(subdir)]: |
|
453 | 451 | return self.makeindex(req, res, tmpl, subdir) |
|
454 | 452 | |
|
455 | 453 | # prefixes not found |
|
456 | 454 | res.status = '404 Not Found' |
|
457 | 455 | res.setbodygen(tmpl('notfound', repo=virtual)) |
|
458 | 456 | return res.sendresponse() |
|
459 | 457 | |
|
460 | 458 | except ErrorResponse as e: |
|
461 | 459 | res.status = statusmessage(e.code, pycompat.bytestr(e)) |
|
462 | 460 | res.setbodygen(tmpl('error', error=e.message or '')) |
|
463 | 461 | return res.sendresponse() |
|
464 | 462 | finally: |
|
465 | 463 | tmpl = None |
|
466 | 464 | |
|
467 | 465 | def makeindex(self, req, res, tmpl, subdir=""): |
|
468 | 466 | self.refresh() |
|
469 | 467 | sortable = ["name", "description", "contact", "lastchange"] |
|
470 | 468 | sortcolumn, descending = None, False |
|
471 | 469 | if 'sort' in req.qsparams: |
|
472 | 470 | sortcolumn = req.qsparams['sort'] |
|
473 | 471 | descending = sortcolumn.startswith('-') |
|
474 | 472 | if descending: |
|
475 | 473 | sortcolumn = sortcolumn[1:] |
|
476 | 474 | if sortcolumn not in sortable: |
|
477 | 475 | sortcolumn = "" |
|
478 | 476 | |
|
479 | 477 | sort = [("sort_%s" % column, |
|
480 | 478 | "%s%s" % ((not descending and column == sortcolumn) |
|
481 | 479 | and "-" or "", column)) |
|
482 | 480 | for column in sortable] |
|
483 | 481 | |
|
484 | 482 | self.refresh() |
|
485 | 483 | |
|
486 | 484 | entries = indexentries(self.ui, self.repos, req, |
|
487 | 485 | self.stripecount, sortcolumn=sortcolumn, |
|
488 | 486 | descending=descending, subdir=subdir) |
|
489 | 487 | |
|
490 | 488 | res.setbodygen(tmpl( |
|
491 | 489 | 'index', |
|
492 | 490 | entries=entries, |
|
493 | 491 | subdir=subdir, |
|
494 | 492 | pathdef=hgweb_mod.makebreadcrumb('/' + subdir, self.prefix), |
|
495 | 493 | sortcolumn=sortcolumn, |
|
496 | 494 | descending=descending, |
|
497 | 495 | **dict(sort))) |
|
498 | 496 | |
|
499 | 497 | return res.sendresponse() |
|
500 | 498 | |
|
501 | 499 | def templater(self, req, nonce): |
|
502 | 500 | |
|
503 | 501 | def motd(**map): |
|
504 | 502 | if self.motd is not None: |
|
505 | 503 | yield self.motd |
|
506 | 504 | else: |
|
507 | 505 | yield config('web', 'motd') |
|
508 | 506 | |
|
509 | 507 | def config(section, name, default=uimod._unset, untrusted=True): |
|
510 | 508 | return self.ui.config(section, name, default, untrusted) |
|
511 | 509 | |
|
512 | 510 | vars = {} |
|
513 | 511 | styles, (style, mapfile) = hgweb_mod.getstyle(req, config, |
|
514 | 512 | self.templatepath) |
|
515 | 513 | if style == styles[0]: |
|
516 | 514 | vars['style'] = style |
|
517 | 515 | |
|
518 | 516 | sessionvars = webutil.sessionvars(vars, r'?') |
|
519 | 517 | logourl = config('web', 'logourl') |
|
520 | 518 | logoimg = config('web', 'logoimg') |
|
521 | 519 | staticurl = (config('web', 'staticurl') |
|
522 | 520 | or req.apppath + '/static/') |
|
523 | 521 | if not staticurl.endswith('/'): |
|
524 | 522 | staticurl += '/' |
|
525 | 523 | |
|
526 | 524 | defaults = { |
|
527 | 525 | "encoding": encoding.encoding, |
|
528 | 526 | "motd": motd, |
|
529 | 527 | "url": req.apppath + '/', |
|
530 | 528 | "logourl": logourl, |
|
531 | 529 | "logoimg": logoimg, |
|
532 | 530 | "staticurl": staticurl, |
|
533 | 531 | "sessionvars": sessionvars, |
|
534 | 532 | "style": style, |
|
535 | 533 | "nonce": nonce, |
|
536 | 534 | } |
|
537 | 535 | tmpl = templater.templater.frommapfile(mapfile, defaults=defaults) |
|
538 | 536 | return tmpl |
@@ -1,942 +1,936 b'' | |||
|
1 | 1 | # logcmdutil.py - utility for log-like commands |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | import itertools |
|
11 | 11 | import os |
|
12 | 12 | |
|
13 | 13 | from .i18n import _ |
|
14 | 14 | from .node import ( |
|
15 | 15 | hex, |
|
16 | 16 | nullid, |
|
17 | 17 | ) |
|
18 | 18 | |
|
19 | 19 | from . import ( |
|
20 | 20 | dagop, |
|
21 | 21 | encoding, |
|
22 | 22 | error, |
|
23 | 23 | formatter, |
|
24 | 24 | graphmod, |
|
25 | 25 | match as matchmod, |
|
26 | 26 | mdiff, |
|
27 | 27 | patch, |
|
28 | 28 | pathutil, |
|
29 | 29 | pycompat, |
|
30 | 30 | revset, |
|
31 | 31 | revsetlang, |
|
32 | 32 | scmutil, |
|
33 | 33 | smartset, |
|
34 | 34 | templatekw, |
|
35 | 35 | templater, |
|
36 | templateutil, | |
|
37 | 36 | util, |
|
38 | 37 | ) |
|
39 | 38 | from .utils import dateutil |
|
40 | 39 | |
|
41 | 40 | def getlimit(opts): |
|
42 | 41 | """get the log limit according to option -l/--limit""" |
|
43 | 42 | limit = opts.get('limit') |
|
44 | 43 | if limit: |
|
45 | 44 | try: |
|
46 | 45 | limit = int(limit) |
|
47 | 46 | except ValueError: |
|
48 | 47 | raise error.Abort(_('limit must be a positive integer')) |
|
49 | 48 | if limit <= 0: |
|
50 | 49 | raise error.Abort(_('limit must be positive')) |
|
51 | 50 | else: |
|
52 | 51 | limit = None |
|
53 | 52 | return limit |
|
54 | 53 | |
|
55 | 54 | def diffordiffstat(ui, repo, diffopts, node1, node2, match, |
|
56 | 55 | changes=None, stat=False, fp=None, prefix='', |
|
57 | 56 | root='', listsubrepos=False, hunksfilterfn=None): |
|
58 | 57 | '''show diff or diffstat.''' |
|
59 | 58 | if root: |
|
60 | 59 | relroot = pathutil.canonpath(repo.root, repo.getcwd(), root) |
|
61 | 60 | else: |
|
62 | 61 | relroot = '' |
|
63 | 62 | if relroot != '': |
|
64 | 63 | # XXX relative roots currently don't work if the root is within a |
|
65 | 64 | # subrepo |
|
66 | 65 | uirelroot = match.uipath(relroot) |
|
67 | 66 | relroot += '/' |
|
68 | 67 | for matchroot in match.files(): |
|
69 | 68 | if not matchroot.startswith(relroot): |
|
70 | 69 | ui.warn(_('warning: %s not inside relative root %s\n') % ( |
|
71 | 70 | match.uipath(matchroot), uirelroot)) |
|
72 | 71 | |
|
73 | 72 | if stat: |
|
74 | 73 | diffopts = diffopts.copy(context=0, noprefix=False) |
|
75 | 74 | width = 80 |
|
76 | 75 | if not ui.plain(): |
|
77 | 76 | width = ui.termwidth() |
|
78 | 77 | |
|
79 | 78 | chunks = patch.diff(repo, node1, node2, match, changes, opts=diffopts, |
|
80 | 79 | prefix=prefix, relroot=relroot, |
|
81 | 80 | hunksfilterfn=hunksfilterfn) |
|
82 | 81 | |
|
83 | 82 | if fp is not None or ui.canwritewithoutlabels(): |
|
84 | 83 | out = fp or ui |
|
85 | 84 | if stat: |
|
86 | 85 | chunks = [patch.diffstat(util.iterlines(chunks), width=width)] |
|
87 | 86 | for chunk in util.filechunkiter(util.chunkbuffer(chunks)): |
|
88 | 87 | out.write(chunk) |
|
89 | 88 | else: |
|
90 | 89 | if stat: |
|
91 | 90 | chunks = patch.diffstatui(util.iterlines(chunks), width=width) |
|
92 | 91 | else: |
|
93 | 92 | chunks = patch.difflabel(lambda chunks, **kwargs: chunks, chunks, |
|
94 | 93 | opts=diffopts) |
|
95 | 94 | if ui.canbatchlabeledwrites(): |
|
96 | 95 | def gen(): |
|
97 | 96 | for chunk, label in chunks: |
|
98 | 97 | yield ui.label(chunk, label=label) |
|
99 | 98 | for chunk in util.filechunkiter(util.chunkbuffer(gen())): |
|
100 | 99 | ui.write(chunk) |
|
101 | 100 | else: |
|
102 | 101 | for chunk, label in chunks: |
|
103 | 102 | ui.write(chunk, label=label) |
|
104 | 103 | |
|
105 | 104 | if listsubrepos: |
|
106 | 105 | ctx1 = repo[node1] |
|
107 | 106 | ctx2 = repo[node2] |
|
108 | 107 | for subpath, sub in scmutil.itersubrepos(ctx1, ctx2): |
|
109 | 108 | tempnode2 = node2 |
|
110 | 109 | try: |
|
111 | 110 | if node2 is not None: |
|
112 | 111 | tempnode2 = ctx2.substate[subpath][1] |
|
113 | 112 | except KeyError: |
|
114 | 113 | # A subrepo that existed in node1 was deleted between node1 and |
|
115 | 114 | # node2 (inclusive). Thus, ctx2's substate won't contain that |
|
116 | 115 | # subpath. The best we can do is to ignore it. |
|
117 | 116 | tempnode2 = None |
|
118 | 117 | submatch = matchmod.subdirmatcher(subpath, match) |
|
119 | 118 | sub.diff(ui, diffopts, tempnode2, submatch, changes=changes, |
|
120 | 119 | stat=stat, fp=fp, prefix=prefix) |
|
121 | 120 | |
|
122 | 121 | class changesetdiffer(object): |
|
123 | 122 | """Generate diff of changeset with pre-configured filtering functions""" |
|
124 | 123 | |
|
125 | 124 | def _makefilematcher(self, ctx): |
|
126 | 125 | return scmutil.matchall(ctx.repo()) |
|
127 | 126 | |
|
128 | 127 | def _makehunksfilter(self, ctx): |
|
129 | 128 | return None |
|
130 | 129 | |
|
131 | 130 | def showdiff(self, ui, ctx, diffopts, stat=False): |
|
132 | 131 | repo = ctx.repo() |
|
133 | 132 | node = ctx.node() |
|
134 | 133 | prev = ctx.p1().node() |
|
135 | 134 | diffordiffstat(ui, repo, diffopts, prev, node, |
|
136 | 135 | match=self._makefilematcher(ctx), stat=stat, |
|
137 | 136 | hunksfilterfn=self._makehunksfilter(ctx)) |
|
138 | 137 | |
|
139 | 138 | def changesetlabels(ctx): |
|
140 | 139 | labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()] |
|
141 | 140 | if ctx.obsolete(): |
|
142 | 141 | labels.append('changeset.obsolete') |
|
143 | 142 | if ctx.isunstable(): |
|
144 | 143 | labels.append('changeset.unstable') |
|
145 | 144 | for instability in ctx.instabilities(): |
|
146 | 145 | labels.append('instability.%s' % instability) |
|
147 | 146 | return ' '.join(labels) |
|
148 | 147 | |
|
149 | 148 | class changesetprinter(object): |
|
150 | 149 | '''show changeset information when templating not requested.''' |
|
151 | 150 | |
|
152 | 151 | def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False): |
|
153 | 152 | self.ui = ui |
|
154 | 153 | self.repo = repo |
|
155 | 154 | self.buffered = buffered |
|
156 | 155 | self._differ = differ or changesetdiffer() |
|
157 | 156 | self.diffopts = diffopts or {} |
|
158 | 157 | self.header = {} |
|
159 | 158 | self.hunk = {} |
|
160 | 159 | self.lastheader = None |
|
161 | 160 | self.footer = None |
|
162 | 161 | self._columns = templatekw.getlogcolumns() |
|
163 | 162 | |
|
164 | 163 | def flush(self, ctx): |
|
165 | 164 | rev = ctx.rev() |
|
166 | 165 | if rev in self.header: |
|
167 | 166 | h = self.header[rev] |
|
168 | 167 | if h != self.lastheader: |
|
169 | 168 | self.lastheader = h |
|
170 | 169 | self.ui.write(h) |
|
171 | 170 | del self.header[rev] |
|
172 | 171 | if rev in self.hunk: |
|
173 | 172 | self.ui.write(self.hunk[rev]) |
|
174 | 173 | del self.hunk[rev] |
|
175 | 174 | |
|
176 | 175 | def close(self): |
|
177 | 176 | if self.footer: |
|
178 | 177 | self.ui.write(self.footer) |
|
179 | 178 | |
|
180 | 179 | def show(self, ctx, copies=None, **props): |
|
181 | 180 | props = pycompat.byteskwargs(props) |
|
182 | 181 | if self.buffered: |
|
183 | 182 | self.ui.pushbuffer(labeled=True) |
|
184 | 183 | self._show(ctx, copies, props) |
|
185 | 184 | self.hunk[ctx.rev()] = self.ui.popbuffer() |
|
186 | 185 | else: |
|
187 | 186 | self._show(ctx, copies, props) |
|
188 | 187 | |
|
189 | 188 | def _show(self, ctx, copies, props): |
|
190 | 189 | '''show a single changeset or file revision''' |
|
191 | 190 | changenode = ctx.node() |
|
192 | 191 | rev = ctx.rev() |
|
193 | 192 | |
|
194 | 193 | if self.ui.quiet: |
|
195 | 194 | self.ui.write("%s\n" % scmutil.formatchangeid(ctx), |
|
196 | 195 | label='log.node') |
|
197 | 196 | return |
|
198 | 197 | |
|
199 | 198 | columns = self._columns |
|
200 | 199 | self.ui.write(columns['changeset'] % scmutil.formatchangeid(ctx), |
|
201 | 200 | label=changesetlabels(ctx)) |
|
202 | 201 | |
|
203 | 202 | # branches are shown first before any other names due to backwards |
|
204 | 203 | # compatibility |
|
205 | 204 | branch = ctx.branch() |
|
206 | 205 | # don't show the default branch name |
|
207 | 206 | if branch != 'default': |
|
208 | 207 | self.ui.write(columns['branch'] % branch, label='log.branch') |
|
209 | 208 | |
|
210 | 209 | for nsname, ns in self.repo.names.iteritems(): |
|
211 | 210 | # branches has special logic already handled above, so here we just |
|
212 | 211 | # skip it |
|
213 | 212 | if nsname == 'branches': |
|
214 | 213 | continue |
|
215 | 214 | # we will use the templatename as the color name since those two |
|
216 | 215 | # should be the same |
|
217 | 216 | for name in ns.names(self.repo, changenode): |
|
218 | 217 | self.ui.write(ns.logfmt % name, |
|
219 | 218 | label='log.%s' % ns.colorname) |
|
220 | 219 | if self.ui.debugflag: |
|
221 | 220 | self.ui.write(columns['phase'] % ctx.phasestr(), label='log.phase') |
|
222 | 221 | for pctx in scmutil.meaningfulparents(self.repo, ctx): |
|
223 | 222 | label = 'log.parent changeset.%s' % pctx.phasestr() |
|
224 | 223 | self.ui.write(columns['parent'] % scmutil.formatchangeid(pctx), |
|
225 | 224 | label=label) |
|
226 | 225 | |
|
227 | 226 | if self.ui.debugflag and rev is not None: |
|
228 | 227 | mnode = ctx.manifestnode() |
|
229 | 228 | mrev = self.repo.manifestlog._revlog.rev(mnode) |
|
230 | 229 | self.ui.write(columns['manifest'] |
|
231 | 230 | % scmutil.formatrevnode(self.ui, mrev, mnode), |
|
232 | 231 | label='ui.debug log.manifest') |
|
233 | 232 | self.ui.write(columns['user'] % ctx.user(), label='log.user') |
|
234 | 233 | self.ui.write(columns['date'] % dateutil.datestr(ctx.date()), |
|
235 | 234 | label='log.date') |
|
236 | 235 | |
|
237 | 236 | if ctx.isunstable(): |
|
238 | 237 | instabilities = ctx.instabilities() |
|
239 | 238 | self.ui.write(columns['instability'] % ', '.join(instabilities), |
|
240 | 239 | label='log.instability') |
|
241 | 240 | |
|
242 | 241 | elif ctx.obsolete(): |
|
243 | 242 | self._showobsfate(ctx) |
|
244 | 243 | |
|
245 | 244 | self._exthook(ctx) |
|
246 | 245 | |
|
247 | 246 | if self.ui.debugflag: |
|
248 | 247 | files = ctx.p1().status(ctx)[:3] |
|
249 | 248 | for key, value in zip(['files', 'files+', 'files-'], files): |
|
250 | 249 | if value: |
|
251 | 250 | self.ui.write(columns[key] % " ".join(value), |
|
252 | 251 | label='ui.debug log.files') |
|
253 | 252 | elif ctx.files() and self.ui.verbose: |
|
254 | 253 | self.ui.write(columns['files'] % " ".join(ctx.files()), |
|
255 | 254 | label='ui.note log.files') |
|
256 | 255 | if copies and self.ui.verbose: |
|
257 | 256 | copies = ['%s (%s)' % c for c in copies] |
|
258 | 257 | self.ui.write(columns['copies'] % ' '.join(copies), |
|
259 | 258 | label='ui.note log.copies') |
|
260 | 259 | |
|
261 | 260 | extra = ctx.extra() |
|
262 | 261 | if extra and self.ui.debugflag: |
|
263 | 262 | for key, value in sorted(extra.items()): |
|
264 | 263 | self.ui.write(columns['extra'] % (key, util.escapestr(value)), |
|
265 | 264 | label='ui.debug log.extra') |
|
266 | 265 | |
|
267 | 266 | description = ctx.description().strip() |
|
268 | 267 | if description: |
|
269 | 268 | if self.ui.verbose: |
|
270 | 269 | self.ui.write(_("description:\n"), |
|
271 | 270 | label='ui.note log.description') |
|
272 | 271 | self.ui.write(description, |
|
273 | 272 | label='ui.note log.description') |
|
274 | 273 | self.ui.write("\n\n") |
|
275 | 274 | else: |
|
276 | 275 | self.ui.write(columns['summary'] % description.splitlines()[0], |
|
277 | 276 | label='log.summary') |
|
278 | 277 | self.ui.write("\n") |
|
279 | 278 | |
|
280 | 279 | self._showpatch(ctx) |
|
281 | 280 | |
|
282 | 281 | def _showobsfate(self, ctx): |
|
283 | 282 | # TODO: do not depend on templater |
|
284 | 283 | tres = formatter.templateresources(self.repo.ui, self.repo) |
|
285 | 284 | t = formatter.maketemplater(self.repo.ui, '{join(obsfate, "\n")}', |
|
286 | 285 | defaults=templatekw.keywords, |
|
287 | 286 | resources=tres) |
|
288 | 287 | obsfate = t.renderdefault({'ctx': ctx, 'revcache': {}}).splitlines() |
|
289 | 288 | |
|
290 | 289 | if obsfate: |
|
291 | 290 | for obsfateline in obsfate: |
|
292 | 291 | self.ui.write(self._columns['obsolete'] % obsfateline, |
|
293 | 292 | label='log.obsfate') |
|
294 | 293 | |
|
295 | 294 | def _exthook(self, ctx): |
|
296 | 295 | '''empty method used by extension as a hook point |
|
297 | 296 | ''' |
|
298 | 297 | |
|
299 | 298 | def _showpatch(self, ctx): |
|
300 | 299 | stat = self.diffopts.get('stat') |
|
301 | 300 | diff = self.diffopts.get('patch') |
|
302 | 301 | diffopts = patch.diffallopts(self.ui, self.diffopts) |
|
303 | 302 | if stat: |
|
304 | 303 | self._differ.showdiff(self.ui, ctx, diffopts, stat=True) |
|
305 | 304 | if stat and diff: |
|
306 | 305 | self.ui.write("\n") |
|
307 | 306 | if diff: |
|
308 | 307 | self._differ.showdiff(self.ui, ctx, diffopts, stat=False) |
|
309 | 308 | if stat or diff: |
|
310 | 309 | self.ui.write("\n") |
|
311 | 310 | |
|
312 | 311 | class jsonchangeset(changesetprinter): |
|
313 | 312 | '''format changeset information.''' |
|
314 | 313 | |
|
315 | 314 | def __init__(self, ui, repo, differ=None, diffopts=None, buffered=False): |
|
316 | 315 | changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered) |
|
317 | 316 | self.cache = {} |
|
318 | 317 | self._first = True |
|
319 | 318 | |
|
320 | 319 | def close(self): |
|
321 | 320 | if not self._first: |
|
322 | 321 | self.ui.write("\n]\n") |
|
323 | 322 | else: |
|
324 | 323 | self.ui.write("[]\n") |
|
325 | 324 | |
|
326 | 325 | def _show(self, ctx, copies, props): |
|
327 | 326 | '''show a single changeset or file revision''' |
|
328 | 327 | rev = ctx.rev() |
|
329 | 328 | if rev is None: |
|
330 | 329 | jrev = jnode = 'null' |
|
331 | 330 | else: |
|
332 | 331 | jrev = '%d' % rev |
|
333 | 332 | jnode = '"%s"' % hex(ctx.node()) |
|
334 | 333 | j = encoding.jsonescape |
|
335 | 334 | |
|
336 | 335 | if self._first: |
|
337 | 336 | self.ui.write("[\n {") |
|
338 | 337 | self._first = False |
|
339 | 338 | else: |
|
340 | 339 | self.ui.write(",\n {") |
|
341 | 340 | |
|
342 | 341 | if self.ui.quiet: |
|
343 | 342 | self.ui.write(('\n "rev": %s') % jrev) |
|
344 | 343 | self.ui.write((',\n "node": %s') % jnode) |
|
345 | 344 | self.ui.write('\n }') |
|
346 | 345 | return |
|
347 | 346 | |
|
348 | 347 | self.ui.write(('\n "rev": %s') % jrev) |
|
349 | 348 | self.ui.write((',\n "node": %s') % jnode) |
|
350 | 349 | self.ui.write((',\n "branch": "%s"') % j(ctx.branch())) |
|
351 | 350 | self.ui.write((',\n "phase": "%s"') % ctx.phasestr()) |
|
352 | 351 | self.ui.write((',\n "user": "%s"') % j(ctx.user())) |
|
353 | 352 | self.ui.write((',\n "date": [%d, %d]') % ctx.date()) |
|
354 | 353 | self.ui.write((',\n "desc": "%s"') % j(ctx.description())) |
|
355 | 354 | |
|
356 | 355 | self.ui.write((',\n "bookmarks": [%s]') % |
|
357 | 356 | ", ".join('"%s"' % j(b) for b in ctx.bookmarks())) |
|
358 | 357 | self.ui.write((',\n "tags": [%s]') % |
|
359 | 358 | ", ".join('"%s"' % j(t) for t in ctx.tags())) |
|
360 | 359 | self.ui.write((',\n "parents": [%s]') % |
|
361 | 360 | ", ".join('"%s"' % c.hex() for c in ctx.parents())) |
|
362 | 361 | |
|
363 | 362 | if self.ui.debugflag: |
|
364 | 363 | if rev is None: |
|
365 | 364 | jmanifestnode = 'null' |
|
366 | 365 | else: |
|
367 | 366 | jmanifestnode = '"%s"' % hex(ctx.manifestnode()) |
|
368 | 367 | self.ui.write((',\n "manifest": %s') % jmanifestnode) |
|
369 | 368 | |
|
370 | 369 | self.ui.write((',\n "extra": {%s}') % |
|
371 | 370 | ", ".join('"%s": "%s"' % (j(k), j(v)) |
|
372 | 371 | for k, v in ctx.extra().items())) |
|
373 | 372 | |
|
374 | 373 | files = ctx.p1().status(ctx) |
|
375 | 374 | self.ui.write((',\n "modified": [%s]') % |
|
376 | 375 | ", ".join('"%s"' % j(f) for f in files[0])) |
|
377 | 376 | self.ui.write((',\n "added": [%s]') % |
|
378 | 377 | ", ".join('"%s"' % j(f) for f in files[1])) |
|
379 | 378 | self.ui.write((',\n "removed": [%s]') % |
|
380 | 379 | ", ".join('"%s"' % j(f) for f in files[2])) |
|
381 | 380 | |
|
382 | 381 | elif self.ui.verbose: |
|
383 | 382 | self.ui.write((',\n "files": [%s]') % |
|
384 | 383 | ", ".join('"%s"' % j(f) for f in ctx.files())) |
|
385 | 384 | |
|
386 | 385 | if copies: |
|
387 | 386 | self.ui.write((',\n "copies": {%s}') % |
|
388 | 387 | ", ".join('"%s": "%s"' % (j(k), j(v)) |
|
389 | 388 | for k, v in copies)) |
|
390 | 389 | |
|
391 | 390 | stat = self.diffopts.get('stat') |
|
392 | 391 | diff = self.diffopts.get('patch') |
|
393 | 392 | diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True) |
|
394 | 393 | if stat: |
|
395 | 394 | self.ui.pushbuffer() |
|
396 | 395 | self._differ.showdiff(self.ui, ctx, diffopts, stat=True) |
|
397 | 396 | self.ui.write((',\n "diffstat": "%s"') |
|
398 | 397 | % j(self.ui.popbuffer())) |
|
399 | 398 | if diff: |
|
400 | 399 | self.ui.pushbuffer() |
|
401 | 400 | self._differ.showdiff(self.ui, ctx, diffopts, stat=False) |
|
402 | 401 | self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer())) |
|
403 | 402 | |
|
404 | 403 | self.ui.write("\n }") |
|
405 | 404 | |
|
406 | 405 | class changesettemplater(changesetprinter): |
|
407 | 406 | '''format changeset information. |
|
408 | 407 | |
|
409 | 408 | Note: there are a variety of convenience functions to build a |
|
410 | 409 | changesettemplater for common cases. See functions such as: |
|
411 | 410 | maketemplater, changesetdisplayer, buildcommittemplate, or other |
|
412 | 411 | functions that use changesest_templater. |
|
413 | 412 | ''' |
|
414 | 413 | |
|
415 | 414 | # Arguments before "buffered" used to be positional. Consider not |
|
416 | 415 | # adding/removing arguments before "buffered" to not break callers. |
|
417 | 416 | def __init__(self, ui, repo, tmplspec, differ=None, diffopts=None, |
|
418 | 417 | buffered=False): |
|
419 | 418 | changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered) |
|
420 | 419 | tres = formatter.templateresources(ui, repo) |
|
421 | 420 | self.t = formatter.loadtemplater(ui, tmplspec, |
|
422 | 421 | defaults=templatekw.keywords, |
|
423 | 422 | resources=tres, |
|
424 | 423 | cache=templatekw.defaulttempl) |
|
425 | 424 | self._counter = itertools.count() |
|
426 | 425 | self._getcache = tres['cache'] # shared with _graphnodeformatter() |
|
427 | 426 | |
|
428 | 427 | self._tref = tmplspec.ref |
|
429 | 428 | self._parts = {'header': '', 'footer': '', |
|
430 | 429 | tmplspec.ref: tmplspec.ref, |
|
431 | 430 | 'docheader': '', 'docfooter': '', |
|
432 | 431 | 'separator': ''} |
|
433 | 432 | if tmplspec.mapfile: |
|
434 | 433 | # find correct templates for current mode, for backward |
|
435 | 434 | # compatibility with 'log -v/-q/--debug' using a mapfile |
|
436 | 435 | tmplmodes = [ |
|
437 | 436 | (True, ''), |
|
438 | 437 | (self.ui.verbose, '_verbose'), |
|
439 | 438 | (self.ui.quiet, '_quiet'), |
|
440 | 439 | (self.ui.debugflag, '_debug'), |
|
441 | 440 | ] |
|
442 | 441 | for mode, postfix in tmplmodes: |
|
443 | 442 | for t in self._parts: |
|
444 | 443 | cur = t + postfix |
|
445 | 444 | if mode and cur in self.t: |
|
446 | 445 | self._parts[t] = cur |
|
447 | 446 | else: |
|
448 | 447 | partnames = [p for p in self._parts.keys() if p != tmplspec.ref] |
|
449 | 448 | m = formatter.templatepartsmap(tmplspec, self.t, partnames) |
|
450 | 449 | self._parts.update(m) |
|
451 | 450 | |
|
452 | 451 | if self._parts['docheader']: |
|
453 | self.ui.write( | |
|
454 | templateutil.stringify(self.t(self._parts['docheader']))) | |
|
452 | self.ui.write(self.t.render(self._parts['docheader'], {})) | |
|
455 | 453 | |
|
456 | 454 | def close(self): |
|
457 | 455 | if self._parts['docfooter']: |
|
458 | 456 | if not self.footer: |
|
459 | 457 | self.footer = "" |
|
460 | self.footer += templateutil.stringify( | |
|
461 | self.t(self._parts['docfooter'])) | |
|
458 | self.footer += self.t.render(self._parts['docfooter'], {}) | |
|
462 | 459 | return super(changesettemplater, self).close() |
|
463 | 460 | |
|
464 | 461 | def _show(self, ctx, copies, props): |
|
465 | 462 | '''show a single changeset or file revision''' |
|
466 | 463 | props = props.copy() |
|
467 | 464 | props['ctx'] = ctx |
|
468 | 465 | props['index'] = index = next(self._counter) |
|
469 | 466 | props['revcache'] = {'copies': copies} |
|
470 | props = pycompat.strkwargs(props) | |
|
471 | 467 | |
|
472 | 468 | # write separator, which wouldn't work well with the header part below |
|
473 | 469 | # since there's inherently a conflict between header (across items) and |
|
474 | 470 | # separator (per item) |
|
475 | 471 | if self._parts['separator'] and index > 0: |
|
476 | self.ui.write( | |
|
477 | templateutil.stringify(self.t(self._parts['separator']))) | |
|
472 | self.ui.write(self.t.render(self._parts['separator'], {})) | |
|
478 | 473 | |
|
479 | 474 | # write header |
|
480 | 475 | if self._parts['header']: |
|
481 |
h = |
|
|
476 | h = self.t.render(self._parts['header'], props) | |
|
482 | 477 | if self.buffered: |
|
483 | 478 | self.header[ctx.rev()] = h |
|
484 | 479 | else: |
|
485 | 480 | if self.lastheader != h: |
|
486 | 481 | self.lastheader = h |
|
487 | 482 | self.ui.write(h) |
|
488 | 483 | |
|
489 | 484 | # write changeset metadata, then patch if requested |
|
490 | 485 | key = self._parts[self._tref] |
|
491 |
self.ui.write( |
|
|
486 | self.ui.write(self.t.render(key, props)) | |
|
492 | 487 | self._showpatch(ctx) |
|
493 | 488 | |
|
494 | 489 | if self._parts['footer']: |
|
495 | 490 | if not self.footer: |
|
496 | self.footer = templateutil.stringify( | |
|
497 | self.t(self._parts['footer'], **props)) | |
|
491 | self.footer = self.t.render(self._parts['footer'], props) | |
|
498 | 492 | |
|
499 | 493 | def templatespec(tmpl, mapfile): |
|
500 | 494 | if mapfile: |
|
501 | 495 | return formatter.templatespec('changeset', tmpl, mapfile) |
|
502 | 496 | else: |
|
503 | 497 | return formatter.templatespec('', tmpl, None) |
|
504 | 498 | |
|
505 | 499 | def _lookuptemplate(ui, tmpl, style): |
|
506 | 500 | """Find the template matching the given template spec or style |
|
507 | 501 | |
|
508 | 502 | See formatter.lookuptemplate() for details. |
|
509 | 503 | """ |
|
510 | 504 | |
|
511 | 505 | # ui settings |
|
512 | 506 | if not tmpl and not style: # template are stronger than style |
|
513 | 507 | tmpl = ui.config('ui', 'logtemplate') |
|
514 | 508 | if tmpl: |
|
515 | 509 | return templatespec(templater.unquotestring(tmpl), None) |
|
516 | 510 | else: |
|
517 | 511 | style = util.expandpath(ui.config('ui', 'style')) |
|
518 | 512 | |
|
519 | 513 | if not tmpl and style: |
|
520 | 514 | mapfile = style |
|
521 | 515 | if not os.path.split(mapfile)[0]: |
|
522 | 516 | mapname = (templater.templatepath('map-cmdline.' + mapfile) |
|
523 | 517 | or templater.templatepath(mapfile)) |
|
524 | 518 | if mapname: |
|
525 | 519 | mapfile = mapname |
|
526 | 520 | return templatespec(None, mapfile) |
|
527 | 521 | |
|
528 | 522 | if not tmpl: |
|
529 | 523 | return templatespec(None, None) |
|
530 | 524 | |
|
531 | 525 | return formatter.lookuptemplate(ui, 'changeset', tmpl) |
|
532 | 526 | |
|
533 | 527 | def maketemplater(ui, repo, tmpl, buffered=False): |
|
534 | 528 | """Create a changesettemplater from a literal template 'tmpl' |
|
535 | 529 | byte-string.""" |
|
536 | 530 | spec = templatespec(tmpl, None) |
|
537 | 531 | return changesettemplater(ui, repo, spec, buffered=buffered) |
|
538 | 532 | |
|
539 | 533 | def changesetdisplayer(ui, repo, opts, differ=None, buffered=False): |
|
540 | 534 | """show one changeset using template or regular display. |
|
541 | 535 | |
|
542 | 536 | Display format will be the first non-empty hit of: |
|
543 | 537 | 1. option 'template' |
|
544 | 538 | 2. option 'style' |
|
545 | 539 | 3. [ui] setting 'logtemplate' |
|
546 | 540 | 4. [ui] setting 'style' |
|
547 | 541 | If all of these values are either the unset or the empty string, |
|
548 | 542 | regular display via changesetprinter() is done. |
|
549 | 543 | """ |
|
550 | 544 | postargs = (differ, opts, buffered) |
|
551 | 545 | if opts.get('template') == 'json': |
|
552 | 546 | return jsonchangeset(ui, repo, *postargs) |
|
553 | 547 | |
|
554 | 548 | spec = _lookuptemplate(ui, opts.get('template'), opts.get('style')) |
|
555 | 549 | |
|
556 | 550 | if not spec.ref and not spec.tmpl and not spec.mapfile: |
|
557 | 551 | return changesetprinter(ui, repo, *postargs) |
|
558 | 552 | |
|
559 | 553 | return changesettemplater(ui, repo, spec, *postargs) |
|
560 | 554 | |
|
561 | 555 | def _makematcher(repo, revs, pats, opts): |
|
562 | 556 | """Build matcher and expanded patterns from log options |
|
563 | 557 | |
|
564 | 558 | If --follow, revs are the revisions to follow from. |
|
565 | 559 | |
|
566 | 560 | Returns (match, pats, slowpath) where |
|
567 | 561 | - match: a matcher built from the given pats and -I/-X opts |
|
568 | 562 | - pats: patterns used (globs are expanded on Windows) |
|
569 | 563 | - slowpath: True if patterns aren't as simple as scanning filelogs |
|
570 | 564 | """ |
|
571 | 565 | # pats/include/exclude are passed to match.match() directly in |
|
572 | 566 | # _matchfiles() revset but walkchangerevs() builds its matcher with |
|
573 | 567 | # scmutil.match(). The difference is input pats are globbed on |
|
574 | 568 | # platforms without shell expansion (windows). |
|
575 | 569 | wctx = repo[None] |
|
576 | 570 | match, pats = scmutil.matchandpats(wctx, pats, opts) |
|
577 | 571 | slowpath = match.anypats() or (not match.always() and opts.get('removed')) |
|
578 | 572 | if not slowpath: |
|
579 | 573 | follow = opts.get('follow') or opts.get('follow_first') |
|
580 | 574 | startctxs = [] |
|
581 | 575 | if follow and opts.get('rev'): |
|
582 | 576 | startctxs = [repo[r] for r in revs] |
|
583 | 577 | for f in match.files(): |
|
584 | 578 | if follow and startctxs: |
|
585 | 579 | # No idea if the path was a directory at that revision, so |
|
586 | 580 | # take the slow path. |
|
587 | 581 | if any(f not in c for c in startctxs): |
|
588 | 582 | slowpath = True |
|
589 | 583 | continue |
|
590 | 584 | elif follow and f not in wctx: |
|
591 | 585 | # If the file exists, it may be a directory, so let it |
|
592 | 586 | # take the slow path. |
|
593 | 587 | if os.path.exists(repo.wjoin(f)): |
|
594 | 588 | slowpath = True |
|
595 | 589 | continue |
|
596 | 590 | else: |
|
597 | 591 | raise error.Abort(_('cannot follow file not in parent ' |
|
598 | 592 | 'revision: "%s"') % f) |
|
599 | 593 | filelog = repo.file(f) |
|
600 | 594 | if not filelog: |
|
601 | 595 | # A zero count may be a directory or deleted file, so |
|
602 | 596 | # try to find matching entries on the slow path. |
|
603 | 597 | if follow: |
|
604 | 598 | raise error.Abort( |
|
605 | 599 | _('cannot follow nonexistent file: "%s"') % f) |
|
606 | 600 | slowpath = True |
|
607 | 601 | |
|
608 | 602 | # We decided to fall back to the slowpath because at least one |
|
609 | 603 | # of the paths was not a file. Check to see if at least one of them |
|
610 | 604 | # existed in history - in that case, we'll continue down the |
|
611 | 605 | # slowpath; otherwise, we can turn off the slowpath |
|
612 | 606 | if slowpath: |
|
613 | 607 | for path in match.files(): |
|
614 | 608 | if path == '.' or path in repo.store: |
|
615 | 609 | break |
|
616 | 610 | else: |
|
617 | 611 | slowpath = False |
|
618 | 612 | |
|
619 | 613 | return match, pats, slowpath |
|
620 | 614 | |
|
621 | 615 | def _fileancestors(repo, revs, match, followfirst): |
|
622 | 616 | fctxs = [] |
|
623 | 617 | for r in revs: |
|
624 | 618 | ctx = repo[r] |
|
625 | 619 | fctxs.extend(ctx[f].introfilectx() for f in ctx.walk(match)) |
|
626 | 620 | |
|
627 | 621 | # When displaying a revision with --patch --follow FILE, we have |
|
628 | 622 | # to know which file of the revision must be diffed. With |
|
629 | 623 | # --follow, we want the names of the ancestors of FILE in the |
|
630 | 624 | # revision, stored in "fcache". "fcache" is populated as a side effect |
|
631 | 625 | # of the graph traversal. |
|
632 | 626 | fcache = {} |
|
633 | 627 | def filematcher(ctx): |
|
634 | 628 | return scmutil.matchfiles(repo, fcache.get(ctx.rev(), [])) |
|
635 | 629 | |
|
636 | 630 | def revgen(): |
|
637 | 631 | for rev, cs in dagop.filectxancestors(fctxs, followfirst=followfirst): |
|
638 | 632 | fcache[rev] = [c.path() for c in cs] |
|
639 | 633 | yield rev |
|
640 | 634 | return smartset.generatorset(revgen(), iterasc=False), filematcher |
|
641 | 635 | |
|
642 | 636 | def _makenofollowfilematcher(repo, pats, opts): |
|
643 | 637 | '''hook for extensions to override the filematcher for non-follow cases''' |
|
644 | 638 | return None |
|
645 | 639 | |
|
646 | 640 | _opt2logrevset = { |
|
647 | 641 | 'no_merges': ('not merge()', None), |
|
648 | 642 | 'only_merges': ('merge()', None), |
|
649 | 643 | '_matchfiles': (None, '_matchfiles(%ps)'), |
|
650 | 644 | 'date': ('date(%s)', None), |
|
651 | 645 | 'branch': ('branch(%s)', '%lr'), |
|
652 | 646 | '_patslog': ('filelog(%s)', '%lr'), |
|
653 | 647 | 'keyword': ('keyword(%s)', '%lr'), |
|
654 | 648 | 'prune': ('ancestors(%s)', 'not %lr'), |
|
655 | 649 | 'user': ('user(%s)', '%lr'), |
|
656 | 650 | } |
|
657 | 651 | |
|
658 | 652 | def _makerevset(repo, match, pats, slowpath, opts): |
|
659 | 653 | """Return a revset string built from log options and file patterns""" |
|
660 | 654 | opts = dict(opts) |
|
661 | 655 | # follow or not follow? |
|
662 | 656 | follow = opts.get('follow') or opts.get('follow_first') |
|
663 | 657 | |
|
664 | 658 | # branch and only_branch are really aliases and must be handled at |
|
665 | 659 | # the same time |
|
666 | 660 | opts['branch'] = opts.get('branch', []) + opts.get('only_branch', []) |
|
667 | 661 | opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']] |
|
668 | 662 | |
|
669 | 663 | if slowpath: |
|
670 | 664 | # See walkchangerevs() slow path. |
|
671 | 665 | # |
|
672 | 666 | # pats/include/exclude cannot be represented as separate |
|
673 | 667 | # revset expressions as their filtering logic applies at file |
|
674 | 668 | # level. For instance "-I a -X b" matches a revision touching |
|
675 | 669 | # "a" and "b" while "file(a) and not file(b)" does |
|
676 | 670 | # not. Besides, filesets are evaluated against the working |
|
677 | 671 | # directory. |
|
678 | 672 | matchargs = ['r:', 'd:relpath'] |
|
679 | 673 | for p in pats: |
|
680 | 674 | matchargs.append('p:' + p) |
|
681 | 675 | for p in opts.get('include', []): |
|
682 | 676 | matchargs.append('i:' + p) |
|
683 | 677 | for p in opts.get('exclude', []): |
|
684 | 678 | matchargs.append('x:' + p) |
|
685 | 679 | opts['_matchfiles'] = matchargs |
|
686 | 680 | elif not follow: |
|
687 | 681 | opts['_patslog'] = list(pats) |
|
688 | 682 | |
|
689 | 683 | expr = [] |
|
690 | 684 | for op, val in sorted(opts.iteritems()): |
|
691 | 685 | if not val: |
|
692 | 686 | continue |
|
693 | 687 | if op not in _opt2logrevset: |
|
694 | 688 | continue |
|
695 | 689 | revop, listop = _opt2logrevset[op] |
|
696 | 690 | if revop and '%' not in revop: |
|
697 | 691 | expr.append(revop) |
|
698 | 692 | elif not listop: |
|
699 | 693 | expr.append(revsetlang.formatspec(revop, val)) |
|
700 | 694 | else: |
|
701 | 695 | if revop: |
|
702 | 696 | val = [revsetlang.formatspec(revop, v) for v in val] |
|
703 | 697 | expr.append(revsetlang.formatspec(listop, val)) |
|
704 | 698 | |
|
705 | 699 | if expr: |
|
706 | 700 | expr = '(' + ' and '.join(expr) + ')' |
|
707 | 701 | else: |
|
708 | 702 | expr = None |
|
709 | 703 | return expr |
|
710 | 704 | |
|
711 | 705 | def _initialrevs(repo, opts): |
|
712 | 706 | """Return the initial set of revisions to be filtered or followed""" |
|
713 | 707 | follow = opts.get('follow') or opts.get('follow_first') |
|
714 | 708 | if opts.get('rev'): |
|
715 | 709 | revs = scmutil.revrange(repo, opts['rev']) |
|
716 | 710 | elif follow and repo.dirstate.p1() == nullid: |
|
717 | 711 | revs = smartset.baseset() |
|
718 | 712 | elif follow: |
|
719 | 713 | revs = repo.revs('.') |
|
720 | 714 | else: |
|
721 | 715 | revs = smartset.spanset(repo) |
|
722 | 716 | revs.reverse() |
|
723 | 717 | return revs |
|
724 | 718 | |
|
725 | 719 | def getrevs(repo, pats, opts): |
|
726 | 720 | """Return (revs, differ) where revs is a smartset |
|
727 | 721 | |
|
728 | 722 | differ is a changesetdiffer with pre-configured file matcher. |
|
729 | 723 | """ |
|
730 | 724 | follow = opts.get('follow') or opts.get('follow_first') |
|
731 | 725 | followfirst = opts.get('follow_first') |
|
732 | 726 | limit = getlimit(opts) |
|
733 | 727 | revs = _initialrevs(repo, opts) |
|
734 | 728 | if not revs: |
|
735 | 729 | return smartset.baseset(), None |
|
736 | 730 | match, pats, slowpath = _makematcher(repo, revs, pats, opts) |
|
737 | 731 | filematcher = None |
|
738 | 732 | if follow: |
|
739 | 733 | if slowpath or match.always(): |
|
740 | 734 | revs = dagop.revancestors(repo, revs, followfirst=followfirst) |
|
741 | 735 | else: |
|
742 | 736 | revs, filematcher = _fileancestors(repo, revs, match, followfirst) |
|
743 | 737 | revs.reverse() |
|
744 | 738 | if filematcher is None: |
|
745 | 739 | filematcher = _makenofollowfilematcher(repo, pats, opts) |
|
746 | 740 | if filematcher is None: |
|
747 | 741 | def filematcher(ctx): |
|
748 | 742 | return match |
|
749 | 743 | |
|
750 | 744 | expr = _makerevset(repo, match, pats, slowpath, opts) |
|
751 | 745 | if opts.get('graph') and opts.get('rev'): |
|
752 | 746 | # User-specified revs might be unsorted, but don't sort before |
|
753 | 747 | # _makerevset because it might depend on the order of revs |
|
754 | 748 | if not (revs.isdescending() or revs.istopo()): |
|
755 | 749 | revs.sort(reverse=True) |
|
756 | 750 | if expr: |
|
757 | 751 | matcher = revset.match(None, expr) |
|
758 | 752 | revs = matcher(repo, revs) |
|
759 | 753 | if limit is not None: |
|
760 | 754 | revs = revs.slice(0, limit) |
|
761 | 755 | |
|
762 | 756 | differ = changesetdiffer() |
|
763 | 757 | differ._makefilematcher = filematcher |
|
764 | 758 | return revs, differ |
|
765 | 759 | |
|
766 | 760 | def _parselinerangeopt(repo, opts): |
|
767 | 761 | """Parse --line-range log option and return a list of tuples (filename, |
|
768 | 762 | (fromline, toline)). |
|
769 | 763 | """ |
|
770 | 764 | linerangebyfname = [] |
|
771 | 765 | for pat in opts.get('line_range', []): |
|
772 | 766 | try: |
|
773 | 767 | pat, linerange = pat.rsplit(',', 1) |
|
774 | 768 | except ValueError: |
|
775 | 769 | raise error.Abort(_('malformatted line-range pattern %s') % pat) |
|
776 | 770 | try: |
|
777 | 771 | fromline, toline = map(int, linerange.split(':')) |
|
778 | 772 | except ValueError: |
|
779 | 773 | raise error.Abort(_("invalid line range for %s") % pat) |
|
780 | 774 | msg = _("line range pattern '%s' must match exactly one file") % pat |
|
781 | 775 | fname = scmutil.parsefollowlinespattern(repo, None, pat, msg) |
|
782 | 776 | linerangebyfname.append( |
|
783 | 777 | (fname, util.processlinerange(fromline, toline))) |
|
784 | 778 | return linerangebyfname |
|
785 | 779 | |
|
786 | 780 | def getlinerangerevs(repo, userrevs, opts): |
|
787 | 781 | """Return (revs, differ). |
|
788 | 782 | |
|
789 | 783 | "revs" are revisions obtained by processing "line-range" log options and |
|
790 | 784 | walking block ancestors of each specified file/line-range. |
|
791 | 785 | |
|
792 | 786 | "differ" is a changesetdiffer with pre-configured file matcher and hunks |
|
793 | 787 | filter. |
|
794 | 788 | """ |
|
795 | 789 | wctx = repo[None] |
|
796 | 790 | |
|
797 | 791 | # Two-levels map of "rev -> file ctx -> [line range]". |
|
798 | 792 | linerangesbyrev = {} |
|
799 | 793 | for fname, (fromline, toline) in _parselinerangeopt(repo, opts): |
|
800 | 794 | if fname not in wctx: |
|
801 | 795 | raise error.Abort(_('cannot follow file not in parent ' |
|
802 | 796 | 'revision: "%s"') % fname) |
|
803 | 797 | fctx = wctx.filectx(fname) |
|
804 | 798 | for fctx, linerange in dagop.blockancestors(fctx, fromline, toline): |
|
805 | 799 | rev = fctx.introrev() |
|
806 | 800 | if rev not in userrevs: |
|
807 | 801 | continue |
|
808 | 802 | linerangesbyrev.setdefault( |
|
809 | 803 | rev, {}).setdefault( |
|
810 | 804 | fctx.path(), []).append(linerange) |
|
811 | 805 | |
|
812 | 806 | def nofilterhunksfn(fctx, hunks): |
|
813 | 807 | return hunks |
|
814 | 808 | |
|
815 | 809 | def hunksfilter(ctx): |
|
816 | 810 | fctxlineranges = linerangesbyrev.get(ctx.rev()) |
|
817 | 811 | if fctxlineranges is None: |
|
818 | 812 | return nofilterhunksfn |
|
819 | 813 | |
|
820 | 814 | def filterfn(fctx, hunks): |
|
821 | 815 | lineranges = fctxlineranges.get(fctx.path()) |
|
822 | 816 | if lineranges is not None: |
|
823 | 817 | for hr, lines in hunks: |
|
824 | 818 | if hr is None: # binary |
|
825 | 819 | yield hr, lines |
|
826 | 820 | continue |
|
827 | 821 | if any(mdiff.hunkinrange(hr[2:], lr) |
|
828 | 822 | for lr in lineranges): |
|
829 | 823 | yield hr, lines |
|
830 | 824 | else: |
|
831 | 825 | for hunk in hunks: |
|
832 | 826 | yield hunk |
|
833 | 827 | |
|
834 | 828 | return filterfn |
|
835 | 829 | |
|
836 | 830 | def filematcher(ctx): |
|
837 | 831 | files = list(linerangesbyrev.get(ctx.rev(), [])) |
|
838 | 832 | return scmutil.matchfiles(repo, files) |
|
839 | 833 | |
|
840 | 834 | revs = sorted(linerangesbyrev, reverse=True) |
|
841 | 835 | |
|
842 | 836 | differ = changesetdiffer() |
|
843 | 837 | differ._makefilematcher = filematcher |
|
844 | 838 | differ._makehunksfilter = hunksfilter |
|
845 | 839 | return revs, differ |
|
846 | 840 | |
|
847 | 841 | def _graphnodeformatter(ui, displayer): |
|
848 | 842 | spec = ui.config('ui', 'graphnodetemplate') |
|
849 | 843 | if not spec: |
|
850 | 844 | return templatekw.getgraphnode # fast path for "{graphnode}" |
|
851 | 845 | |
|
852 | 846 | spec = templater.unquotestring(spec) |
|
853 | 847 | tres = formatter.templateresources(ui) |
|
854 | 848 | if isinstance(displayer, changesettemplater): |
|
855 | 849 | # reuse cache of slow templates |
|
856 | 850 | tres['cache'] = displayer._getcache |
|
857 | 851 | templ = formatter.maketemplater(ui, spec, defaults=templatekw.keywords, |
|
858 | 852 | resources=tres) |
|
859 | 853 | def formatnode(repo, ctx): |
|
860 | 854 | props = {'ctx': ctx, 'repo': repo, 'revcache': {}} |
|
861 | 855 | return templ.renderdefault(props) |
|
862 | 856 | return formatnode |
|
863 | 857 | |
|
864 | 858 | def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None, props=None): |
|
865 | 859 | props = props or {} |
|
866 | 860 | formatnode = _graphnodeformatter(ui, displayer) |
|
867 | 861 | state = graphmod.asciistate() |
|
868 | 862 | styles = state['styles'] |
|
869 | 863 | |
|
870 | 864 | # only set graph styling if HGPLAIN is not set. |
|
871 | 865 | if ui.plain('graph'): |
|
872 | 866 | # set all edge styles to |, the default pre-3.8 behaviour |
|
873 | 867 | styles.update(dict.fromkeys(styles, '|')) |
|
874 | 868 | else: |
|
875 | 869 | edgetypes = { |
|
876 | 870 | 'parent': graphmod.PARENT, |
|
877 | 871 | 'grandparent': graphmod.GRANDPARENT, |
|
878 | 872 | 'missing': graphmod.MISSINGPARENT |
|
879 | 873 | } |
|
880 | 874 | for name, key in edgetypes.items(): |
|
881 | 875 | # experimental config: experimental.graphstyle.* |
|
882 | 876 | styles[key] = ui.config('experimental', 'graphstyle.%s' % name, |
|
883 | 877 | styles[key]) |
|
884 | 878 | if not styles[key]: |
|
885 | 879 | styles[key] = None |
|
886 | 880 | |
|
887 | 881 | # experimental config: experimental.graphshorten |
|
888 | 882 | state['graphshorten'] = ui.configbool('experimental', 'graphshorten') |
|
889 | 883 | |
|
890 | 884 | for rev, type, ctx, parents in dag: |
|
891 | 885 | char = formatnode(repo, ctx) |
|
892 | 886 | copies = None |
|
893 | 887 | if getrenamed and ctx.rev(): |
|
894 | 888 | copies = [] |
|
895 | 889 | for fn in ctx.files(): |
|
896 | 890 | rename = getrenamed(fn, ctx.rev()) |
|
897 | 891 | if rename: |
|
898 | 892 | copies.append((fn, rename[0])) |
|
899 | 893 | edges = edgefn(type, char, state, rev, parents) |
|
900 | 894 | firstedge = next(edges) |
|
901 | 895 | width = firstedge[2] |
|
902 | 896 | displayer.show(ctx, copies=copies, |
|
903 | 897 | graphwidth=width, **pycompat.strkwargs(props)) |
|
904 | 898 | lines = displayer.hunk.pop(rev).split('\n') |
|
905 | 899 | if not lines[-1]: |
|
906 | 900 | del lines[-1] |
|
907 | 901 | displayer.flush(ctx) |
|
908 | 902 | for type, char, width, coldata in itertools.chain([firstedge], edges): |
|
909 | 903 | graphmod.ascii(ui, state, type, char, lines, coldata) |
|
910 | 904 | lines = [] |
|
911 | 905 | displayer.close() |
|
912 | 906 | |
|
913 | 907 | def displaygraphrevs(ui, repo, revs, displayer, getrenamed): |
|
914 | 908 | revdag = graphmod.dagwalker(repo, revs) |
|
915 | 909 | displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed) |
|
916 | 910 | |
|
917 | 911 | def displayrevs(ui, repo, revs, displayer, getrenamed): |
|
918 | 912 | for rev in revs: |
|
919 | 913 | ctx = repo[rev] |
|
920 | 914 | copies = None |
|
921 | 915 | if getrenamed is not None and rev: |
|
922 | 916 | copies = [] |
|
923 | 917 | for fn in ctx.files(): |
|
924 | 918 | rename = getrenamed(fn, rev) |
|
925 | 919 | if rename: |
|
926 | 920 | copies.append((fn, rename[0])) |
|
927 | 921 | displayer.show(ctx, copies=copies) |
|
928 | 922 | displayer.flush(ctx) |
|
929 | 923 | displayer.close() |
|
930 | 924 | |
|
931 | 925 | def checkunsupportedgraphflags(pats, opts): |
|
932 | 926 | for op in ["newest_first"]: |
|
933 | 927 | if op in opts and opts[op]: |
|
934 | 928 | raise error.Abort(_("-G/--graph option is incompatible with --%s") |
|
935 | 929 | % op.replace("_", "-")) |
|
936 | 930 | |
|
937 | 931 | def graphrevs(repo, nodes, opts): |
|
938 | 932 | limit = getlimit(opts) |
|
939 | 933 | nodes.reverse() |
|
940 | 934 | if limit is not None: |
|
941 | 935 | nodes = nodes[:limit] |
|
942 | 936 | return graphmod.nodes(repo, nodes) |
@@ -1,797 +1,801 b'' | |||
|
1 | 1 | # templater.py - template expansion for output |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import, print_function |
|
9 | 9 | |
|
10 | 10 | import os |
|
11 | 11 | |
|
12 | 12 | from .i18n import _ |
|
13 | 13 | from . import ( |
|
14 | 14 | config, |
|
15 | 15 | encoding, |
|
16 | 16 | error, |
|
17 | 17 | parser, |
|
18 | 18 | pycompat, |
|
19 | 19 | templatefilters, |
|
20 | 20 | templatefuncs, |
|
21 | 21 | templateutil, |
|
22 | 22 | util, |
|
23 | 23 | ) |
|
24 | 24 | |
|
25 | 25 | # template parsing |
|
26 | 26 | |
|
27 | 27 | elements = { |
|
28 | 28 | # token-type: binding-strength, primary, prefix, infix, suffix |
|
29 | 29 | "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None), |
|
30 | 30 | ".": (18, None, None, (".", 18), None), |
|
31 | 31 | "%": (15, None, None, ("%", 15), None), |
|
32 | 32 | "|": (15, None, None, ("|", 15), None), |
|
33 | 33 | "*": (5, None, None, ("*", 5), None), |
|
34 | 34 | "/": (5, None, None, ("/", 5), None), |
|
35 | 35 | "+": (4, None, None, ("+", 4), None), |
|
36 | 36 | "-": (4, None, ("negate", 19), ("-", 4), None), |
|
37 | 37 | "=": (3, None, None, ("keyvalue", 3), None), |
|
38 | 38 | ",": (2, None, None, ("list", 2), None), |
|
39 | 39 | ")": (0, None, None, None, None), |
|
40 | 40 | "integer": (0, "integer", None, None, None), |
|
41 | 41 | "symbol": (0, "symbol", None, None, None), |
|
42 | 42 | "string": (0, "string", None, None, None), |
|
43 | 43 | "template": (0, "template", None, None, None), |
|
44 | 44 | "end": (0, None, None, None, None), |
|
45 | 45 | } |
|
46 | 46 | |
|
47 | 47 | def tokenize(program, start, end, term=None): |
|
48 | 48 | """Parse a template expression into a stream of tokens, which must end |
|
49 | 49 | with term if specified""" |
|
50 | 50 | pos = start |
|
51 | 51 | program = pycompat.bytestr(program) |
|
52 | 52 | while pos < end: |
|
53 | 53 | c = program[pos] |
|
54 | 54 | if c.isspace(): # skip inter-token whitespace |
|
55 | 55 | pass |
|
56 | 56 | elif c in "(=,).%|+-*/": # handle simple operators |
|
57 | 57 | yield (c, None, pos) |
|
58 | 58 | elif c in '"\'': # handle quoted templates |
|
59 | 59 | s = pos + 1 |
|
60 | 60 | data, pos = _parsetemplate(program, s, end, c) |
|
61 | 61 | yield ('template', data, s) |
|
62 | 62 | pos -= 1 |
|
63 | 63 | elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'): |
|
64 | 64 | # handle quoted strings |
|
65 | 65 | c = program[pos + 1] |
|
66 | 66 | s = pos = pos + 2 |
|
67 | 67 | while pos < end: # find closing quote |
|
68 | 68 | d = program[pos] |
|
69 | 69 | if d == '\\': # skip over escaped characters |
|
70 | 70 | pos += 2 |
|
71 | 71 | continue |
|
72 | 72 | if d == c: |
|
73 | 73 | yield ('string', program[s:pos], s) |
|
74 | 74 | break |
|
75 | 75 | pos += 1 |
|
76 | 76 | else: |
|
77 | 77 | raise error.ParseError(_("unterminated string"), s) |
|
78 | 78 | elif c.isdigit(): |
|
79 | 79 | s = pos |
|
80 | 80 | while pos < end: |
|
81 | 81 | d = program[pos] |
|
82 | 82 | if not d.isdigit(): |
|
83 | 83 | break |
|
84 | 84 | pos += 1 |
|
85 | 85 | yield ('integer', program[s:pos], s) |
|
86 | 86 | pos -= 1 |
|
87 | 87 | elif (c == '\\' and program[pos:pos + 2] in (br"\'", br'\"') |
|
88 | 88 | or c == 'r' and program[pos:pos + 3] in (br"r\'", br'r\"')): |
|
89 | 89 | # handle escaped quoted strings for compatibility with 2.9.2-3.4, |
|
90 | 90 | # where some of nested templates were preprocessed as strings and |
|
91 | 91 | # then compiled. therefore, \"...\" was allowed. (issue4733) |
|
92 | 92 | # |
|
93 | 93 | # processing flow of _evalifliteral() at 5ab28a2e9962: |
|
94 | 94 | # outer template string -> stringify() -> compiletemplate() |
|
95 | 95 | # ------------------------ ------------ ------------------ |
|
96 | 96 | # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}] |
|
97 | 97 | # ~~~~~~~~ |
|
98 | 98 | # escaped quoted string |
|
99 | 99 | if c == 'r': |
|
100 | 100 | pos += 1 |
|
101 | 101 | token = 'string' |
|
102 | 102 | else: |
|
103 | 103 | token = 'template' |
|
104 | 104 | quote = program[pos:pos + 2] |
|
105 | 105 | s = pos = pos + 2 |
|
106 | 106 | while pos < end: # find closing escaped quote |
|
107 | 107 | if program.startswith('\\\\\\', pos, end): |
|
108 | 108 | pos += 4 # skip over double escaped characters |
|
109 | 109 | continue |
|
110 | 110 | if program.startswith(quote, pos, end): |
|
111 | 111 | # interpret as if it were a part of an outer string |
|
112 | 112 | data = parser.unescapestr(program[s:pos]) |
|
113 | 113 | if token == 'template': |
|
114 | 114 | data = _parsetemplate(data, 0, len(data))[0] |
|
115 | 115 | yield (token, data, s) |
|
116 | 116 | pos += 1 |
|
117 | 117 | break |
|
118 | 118 | pos += 1 |
|
119 | 119 | else: |
|
120 | 120 | raise error.ParseError(_("unterminated string"), s) |
|
121 | 121 | elif c.isalnum() or c in '_': |
|
122 | 122 | s = pos |
|
123 | 123 | pos += 1 |
|
124 | 124 | while pos < end: # find end of symbol |
|
125 | 125 | d = program[pos] |
|
126 | 126 | if not (d.isalnum() or d == "_"): |
|
127 | 127 | break |
|
128 | 128 | pos += 1 |
|
129 | 129 | sym = program[s:pos] |
|
130 | 130 | yield ('symbol', sym, s) |
|
131 | 131 | pos -= 1 |
|
132 | 132 | elif c == term: |
|
133 | 133 | yield ('end', None, pos) |
|
134 | 134 | return |
|
135 | 135 | else: |
|
136 | 136 | raise error.ParseError(_("syntax error"), pos) |
|
137 | 137 | pos += 1 |
|
138 | 138 | if term: |
|
139 | 139 | raise error.ParseError(_("unterminated template expansion"), start) |
|
140 | 140 | yield ('end', None, pos) |
|
141 | 141 | |
|
142 | 142 | def _parsetemplate(tmpl, start, stop, quote=''): |
|
143 | 143 | r""" |
|
144 | 144 | >>> _parsetemplate(b'foo{bar}"baz', 0, 12) |
|
145 | 145 | ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12) |
|
146 | 146 | >>> _parsetemplate(b'foo{bar}"baz', 0, 12, quote=b'"') |
|
147 | 147 | ([('string', 'foo'), ('symbol', 'bar')], 9) |
|
148 | 148 | >>> _parsetemplate(b'foo"{bar}', 0, 9, quote=b'"') |
|
149 | 149 | ([('string', 'foo')], 4) |
|
150 | 150 | >>> _parsetemplate(br'foo\"bar"baz', 0, 12, quote=b'"') |
|
151 | 151 | ([('string', 'foo"'), ('string', 'bar')], 9) |
|
152 | 152 | >>> _parsetemplate(br'foo\\"bar', 0, 10, quote=b'"') |
|
153 | 153 | ([('string', 'foo\\')], 6) |
|
154 | 154 | """ |
|
155 | 155 | parsed = [] |
|
156 | 156 | for typ, val, pos in _scantemplate(tmpl, start, stop, quote): |
|
157 | 157 | if typ == 'string': |
|
158 | 158 | parsed.append((typ, val)) |
|
159 | 159 | elif typ == 'template': |
|
160 | 160 | parsed.append(val) |
|
161 | 161 | elif typ == 'end': |
|
162 | 162 | return parsed, pos |
|
163 | 163 | else: |
|
164 | 164 | raise error.ProgrammingError('unexpected type: %s' % typ) |
|
165 | 165 | raise error.ProgrammingError('unterminated scanning of template') |
|
166 | 166 | |
|
167 | 167 | def scantemplate(tmpl, raw=False): |
|
168 | 168 | r"""Scan (type, start, end) positions of outermost elements in template |
|
169 | 169 | |
|
170 | 170 | If raw=True, a backslash is not taken as an escape character just like |
|
171 | 171 | r'' string in Python. Note that this is different from r'' literal in |
|
172 | 172 | template in that no template fragment can appear in r'', e.g. r'{foo}' |
|
173 | 173 | is a literal '{foo}', but ('{foo}', raw=True) is a template expression |
|
174 | 174 | 'foo'. |
|
175 | 175 | |
|
176 | 176 | >>> list(scantemplate(b'foo{bar}"baz')) |
|
177 | 177 | [('string', 0, 3), ('template', 3, 8), ('string', 8, 12)] |
|
178 | 178 | >>> list(scantemplate(b'outer{"inner"}outer')) |
|
179 | 179 | [('string', 0, 5), ('template', 5, 14), ('string', 14, 19)] |
|
180 | 180 | >>> list(scantemplate(b'foo\\{escaped}')) |
|
181 | 181 | [('string', 0, 5), ('string', 5, 13)] |
|
182 | 182 | >>> list(scantemplate(b'foo\\{escaped}', raw=True)) |
|
183 | 183 | [('string', 0, 4), ('template', 4, 13)] |
|
184 | 184 | """ |
|
185 | 185 | last = None |
|
186 | 186 | for typ, val, pos in _scantemplate(tmpl, 0, len(tmpl), raw=raw): |
|
187 | 187 | if last: |
|
188 | 188 | yield last + (pos,) |
|
189 | 189 | if typ == 'end': |
|
190 | 190 | return |
|
191 | 191 | else: |
|
192 | 192 | last = (typ, pos) |
|
193 | 193 | raise error.ProgrammingError('unterminated scanning of template') |
|
194 | 194 | |
|
195 | 195 | def _scantemplate(tmpl, start, stop, quote='', raw=False): |
|
196 | 196 | """Parse template string into chunks of strings and template expressions""" |
|
197 | 197 | sepchars = '{' + quote |
|
198 | 198 | unescape = [parser.unescapestr, pycompat.identity][raw] |
|
199 | 199 | pos = start |
|
200 | 200 | p = parser.parser(elements) |
|
201 | 201 | try: |
|
202 | 202 | while pos < stop: |
|
203 | 203 | n = min((tmpl.find(c, pos, stop) for c in sepchars), |
|
204 | 204 | key=lambda n: (n < 0, n)) |
|
205 | 205 | if n < 0: |
|
206 | 206 | yield ('string', unescape(tmpl[pos:stop]), pos) |
|
207 | 207 | pos = stop |
|
208 | 208 | break |
|
209 | 209 | c = tmpl[n:n + 1] |
|
210 | 210 | bs = 0 # count leading backslashes |
|
211 | 211 | if not raw: |
|
212 | 212 | bs = (n - pos) - len(tmpl[pos:n].rstrip('\\')) |
|
213 | 213 | if bs % 2 == 1: |
|
214 | 214 | # escaped (e.g. '\{', '\\\{', but not '\\{') |
|
215 | 215 | yield ('string', unescape(tmpl[pos:n - 1]) + c, pos) |
|
216 | 216 | pos = n + 1 |
|
217 | 217 | continue |
|
218 | 218 | if n > pos: |
|
219 | 219 | yield ('string', unescape(tmpl[pos:n]), pos) |
|
220 | 220 | if c == quote: |
|
221 | 221 | yield ('end', None, n + 1) |
|
222 | 222 | return |
|
223 | 223 | |
|
224 | 224 | parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}')) |
|
225 | 225 | if not tmpl.startswith('}', pos): |
|
226 | 226 | raise error.ParseError(_("invalid token"), pos) |
|
227 | 227 | yield ('template', parseres, n) |
|
228 | 228 | pos += 1 |
|
229 | 229 | |
|
230 | 230 | if quote: |
|
231 | 231 | raise error.ParseError(_("unterminated string"), start) |
|
232 | 232 | except error.ParseError as inst: |
|
233 | 233 | if len(inst.args) > 1: # has location |
|
234 | 234 | loc = inst.args[1] |
|
235 | 235 | # Offset the caret location by the number of newlines before the |
|
236 | 236 | # location of the error, since we will replace one-char newlines |
|
237 | 237 | # with the two-char literal r'\n'. |
|
238 | 238 | offset = tmpl[:loc].count('\n') |
|
239 | 239 | tmpl = tmpl.replace('\n', br'\n') |
|
240 | 240 | # We want the caret to point to the place in the template that |
|
241 | 241 | # failed to parse, but in a hint we get a open paren at the |
|
242 | 242 | # start. Therefore, we print "loc + 1" spaces (instead of "loc") |
|
243 | 243 | # to line up the caret with the location of the error. |
|
244 | 244 | inst.hint = (tmpl + '\n' |
|
245 | 245 | + ' ' * (loc + 1 + offset) + '^ ' + _('here')) |
|
246 | 246 | raise |
|
247 | 247 | yield ('end', None, pos) |
|
248 | 248 | |
|
249 | 249 | def _unnesttemplatelist(tree): |
|
250 | 250 | """Expand list of templates to node tuple |
|
251 | 251 | |
|
252 | 252 | >>> def f(tree): |
|
253 | 253 | ... print(pycompat.sysstr(prettyformat(_unnesttemplatelist(tree)))) |
|
254 | 254 | >>> f((b'template', [])) |
|
255 | 255 | (string '') |
|
256 | 256 | >>> f((b'template', [(b'string', b'foo')])) |
|
257 | 257 | (string 'foo') |
|
258 | 258 | >>> f((b'template', [(b'string', b'foo'), (b'symbol', b'rev')])) |
|
259 | 259 | (template |
|
260 | 260 | (string 'foo') |
|
261 | 261 | (symbol 'rev')) |
|
262 | 262 | >>> f((b'template', [(b'symbol', b'rev')])) # template(rev) -> str |
|
263 | 263 | (template |
|
264 | 264 | (symbol 'rev')) |
|
265 | 265 | >>> f((b'template', [(b'template', [(b'string', b'foo')])])) |
|
266 | 266 | (string 'foo') |
|
267 | 267 | """ |
|
268 | 268 | if not isinstance(tree, tuple): |
|
269 | 269 | return tree |
|
270 | 270 | op = tree[0] |
|
271 | 271 | if op != 'template': |
|
272 | 272 | return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:]) |
|
273 | 273 | |
|
274 | 274 | assert len(tree) == 2 |
|
275 | 275 | xs = tuple(_unnesttemplatelist(x) for x in tree[1]) |
|
276 | 276 | if not xs: |
|
277 | 277 | return ('string', '') # empty template "" |
|
278 | 278 | elif len(xs) == 1 and xs[0][0] == 'string': |
|
279 | 279 | return xs[0] # fast path for string with no template fragment "x" |
|
280 | 280 | else: |
|
281 | 281 | return (op,) + xs |
|
282 | 282 | |
|
def parse(tmpl):
    """Parse template string into tree"""
    tree, stop = _parsetemplate(tmpl, 0, len(tmpl))
    # an unquoted top-level template has no terminator, so the parser
    # must have consumed every byte
    assert stop == len(tmpl), 'unquoted template should be consumed'
    return _unnesttemplatelist(('template', tree))
|
288 | 288 | |
|
def _parseexpr(expr):
    """Parse a template expression into tree

    >>> _parseexpr(b'"foo"')
    ('string', 'foo')
    >>> _parseexpr(b'foo(bar)')
    ('func', ('symbol', 'foo'), ('symbol', 'bar'))
    >>> _parseexpr(b'foo(')
    Traceback (most recent call last):
      ...
    ParseError: ('not a prefix: end', 4)
    >>> _parseexpr(b'"foo" "bar"')
    Traceback (most recent call last):
      ...
    ParseError: ('invalid token', 7)
    """
    exprparser = parser.parser(elements)
    tree, pos = exprparser.parse(tokenize(expr, 0, len(expr)))
    # trailing garbage after a complete expression is an error
    if pos != len(expr):
        raise error.ParseError(_('invalid token'), pos)
    return _unnesttemplatelist(tree)
|
310 | 310 | |
|
def prettyformat(tree):
    # pretty-print a parsed tree; the listed node types are leaves and are
    # printed inline rather than recursed into
    return parser.prettyformat(tree, ('integer', 'string', 'symbol'))
|
313 | 313 | |
|
def compileexp(exp, context, curmethods):
    """Compile parsed template tree to (func, data) pair"""
    if not exp:
        raise error.ParseError(_("missing argument"))
    # dispatch on the node's operator name; curmethods selects between
    # top-level and inner-expression interpretation
    method = curmethods.get(exp[0])
    if method is None:
        raise error.ParseError(_("unknown method '%s'") % exp[0])
    return method(exp, context)
|
322 | 322 | |
|
323 | 323 | # template evaluation |
|
324 | 324 | |
|
def getsymbol(exp):
    """Extract the name carried by a 'symbol' node; reject any other node."""
    if exp[0] != 'symbol':
        raise error.ParseError(_("expected a symbol, got '%s'") % exp[0])
    return exp[1]
|
329 | 329 | |
|
def getlist(x):
    """Flatten a left-nested chain of 'list' nodes into a Python list."""
    if not x:
        return []
    # walk down the left spine, collecting right-hand items in reverse
    items = []
    while x and x[0] == 'list':
        items.append(x[2])
        x = x[1]
    if x:
        items.append(x)
    items.reverse()
    return items
|
336 | 336 | |
|
def gettemplate(exp, context):
    """Compile given template tree or load named template from map file;
    returns (func, data) pair"""
    op = exp[0]
    if op in ('template', 'string'):
        return compileexp(exp, context, methods)
    if op == 'symbol':
        # unlike runsymbol(), a bare symbol here always names a template in
        # the map file, even if the same name exists in the mapping; this
        # lets web map files redefine e.g. 'changelogtag'
        return context._load(exp[1])
    raise error.ParseError(_("expected template specifier"))
|
348 | 348 | |
|
def _runrecursivesymbol(context, mapping, key):
    # poison value planted in engine._cache while template 'key' is being
    # compiled; evaluating it means the template referenced itself
    raise error.Abort(_("recursive reference '%s' in template") % key)
|
351 | 351 | |
|
def buildtemplate(exp, context):
    """Compile a 'template' node into a sequence of compiled fragments."""
    compiled = [compileexp(child, context, methods) for child in exp[1:]]
    return (templateutil.runtemplate, compiled)
|
355 | 355 | |
|
def buildfilter(exp, context):
    """Compile 'expr|name', where name is a filter or a unary function."""
    name = getsymbol(exp[2])
    # filters take precedence over functions of the same name
    filt = context._filters.get(name)
    if filt is not None:
        arg = compileexp(exp[1], context, methods)
        return (templateutil.runfilter, (arg, filt))
    func = context._funcs.get(name)
    if func is not None:
        args = _buildfuncargs(exp[1], context, methods, name, func._argspec)
        return (func, args)
    raise error.ParseError(_("unknown function '%s'") % name)
|
367 | 367 | |
|
def buildmap(exp, context):
    """Compile 'data % template' mapping expression."""
    return (templateutil.runmap,
            (compileexp(exp[1], context, methods),
             gettemplate(exp[2], context)))
|
372 | 372 | |
|
def buildmember(exp, context):
    """Compile 'data.member' attribute access."""
    return (templateutil.runmember,
            (compileexp(exp[1], context, methods), getsymbol(exp[2])))
|
377 | 377 | |
|
def buildnegate(exp, context):
    """Compile unary minus."""
    return (templateutil.runnegate,
            compileexp(exp[1], context, exprmethods))
|
381 | 381 | |
|
def buildarithmetic(exp, context, func):
    """Compile a binary arithmetic node; func performs the actual math."""
    lhs = compileexp(exp[1], context, exprmethods)
    rhs = compileexp(exp[2], context, exprmethods)
    return (templateutil.runarithmetic, (func, lhs, rhs))
|
386 | 386 | |
|
def buildfunc(exp, context):
    """Compile 'name(args)'; name may be a function or a unary filter."""
    name = getsymbol(exp[1])
    # functions take precedence over filters of the same name
    func = context._funcs.get(name)
    if func is not None:
        args = _buildfuncargs(exp[2], context, exprmethods, name,
                              func._argspec)
        return (func, args)
    filt = context._filters.get(name)
    if filt is not None:
        # a filter used in call syntax must receive exactly one argument
        args = _buildfuncargs(exp[2], context, exprmethods, name,
                              argspec=None)
        if len(args) != 1:
            raise error.ParseError(_("filter %s expects one argument") % name)
        return (templateutil.runfilter, (args[0], filt))
    raise error.ParseError(_("unknown function '%s'") % name)
|
400 | 400 | |
|
def _buildfuncargs(exp, context, curmethods, funcname, argspec):
    """Compile parsed tree of function arguments into list or dict of
    (func, data) pairs

    >>> context = engine(lambda t: (templateutil.runsymbol, t))
    >>> def fargs(expr, argspec):
    ...     x = _parseexpr(expr)
    ...     n = getsymbol(x[1])
    ...     return _buildfuncargs(x[2], context, exprmethods, n, argspec)
    >>> list(fargs(b'a(l=1, k=2)', b'k l m').keys())
    ['l', 'k']
    >>> args = fargs(b'a(opts=1, k=2)', b'**opts')
    >>> list(args.keys()), list(args[b'opts'].keys())
    (['opts'], ['opts', 'k'])
    """
    # compile a {name: tree} dict of keyword arguments, preserving order
    def compiledict(xs):
        return util.sortdict((k, compileexp(x, context, curmethods))
                             for k, x in xs.iteritems())
    # compile a flat list of positional argument trees
    def compilelist(xs):
        return [compileexp(x, context, curmethods) for x in xs]

    if not argspec:
        # filter or function with no argspec: return list of positional args
        return compilelist(getlist(exp))

    # function with argspec: return dict of named args
    _poskeys, varkey, _keys, optkey = argspec = parser.splitargspec(argspec)
    treeargs = parser.buildargsdict(getlist(exp), funcname, argspec,
                                    keyvaluenode='keyvalue', keynode='symbol')
    compargs = util.sortdict()
    if varkey:
        # '*var': remaining positional args collected as a list
        compargs[varkey] = compilelist(treeargs.pop(varkey))
    if optkey:
        # '**opts': unknown keyword args collected as a dict
        compargs[optkey] = compiledict(treeargs.pop(optkey))
    compargs.update(compiledict(treeargs))
    return compargs
|
437 | 437 | |
|
def buildkeyvaluepair(exp, context):
    """Reject a 'key=value' node outside function arguments.

    'keyvalue' nodes are consumed by _buildfuncargs(); reaching this compile
    method means the pair appeared where it is not allowed.

    The second parameter was previously misnamed ``content``; it is the
    compile context, as in every other build* method (all of which are
    invoked positionally via the methods dispatch dicts).
    """
    raise error.ParseError(_("can't use a key-value pair in this context"))
|
440 | 440 | |
|
441 | 441 | # methods to interpret function arguments or inner expressions (e.g. {_(x)}) |
|
442 | 442 | exprmethods = { |
|
443 | 443 | "integer": lambda e, c: (templateutil.runinteger, e[1]), |
|
444 | 444 | "string": lambda e, c: (templateutil.runstring, e[1]), |
|
445 | 445 | "symbol": lambda e, c: (templateutil.runsymbol, e[1]), |
|
446 | 446 | "template": buildtemplate, |
|
447 | 447 | "group": lambda e, c: compileexp(e[1], c, exprmethods), |
|
448 | 448 | ".": buildmember, |
|
449 | 449 | "|": buildfilter, |
|
450 | 450 | "%": buildmap, |
|
451 | 451 | "func": buildfunc, |
|
452 | 452 | "keyvalue": buildkeyvaluepair, |
|
453 | 453 | "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b), |
|
454 | 454 | "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b), |
|
455 | 455 | "negate": buildnegate, |
|
456 | 456 | "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b), |
|
457 | 457 | "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b), |
|
458 | 458 | } |
|
459 | 459 | |
|
# methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"});
# identical to exprmethods except a bare integer is taken as a mapping
# variable name rather than a literal number
methods = exprmethods.copy()
methods["integer"] = exprmethods["symbol"]  # '{1}' as variable
|
463 | 463 | |
|
class _aliasrules(parser.basealiasrules):
    """Parsing and expansion rule set of template aliases"""
    _section = _('template alias')
    _parse = staticmethod(_parseexpr)

    @staticmethod
    def _trygetfunc(tree):
        """Return (name, args) if tree is func(...) or ...|filter; otherwise
        None"""
        op = tree[0]
        # call syntax: name(args)
        if op == 'func' and tree[1][0] == 'symbol':
            return tree[1][1], getlist(tree[2])
        # filter syntax: arg|name, treated as a one-argument call
        if op == '|' and tree[2][0] == 'symbol':
            return tree[2][1], [tree[1]]
        return None
|
477 | 477 | |
|
def expandaliases(tree, aliases):
    """Return a new tree with the given aliases expanded"""
    return _aliasrules.expand(_aliasrules.buildmap(aliases), tree)
|
482 | 482 | |
|
483 | 483 | # template engine |
|
484 | 484 | |
|
def _flatten(thing):
    '''yield a single stream from a possibly nested set of iterators'''
    thing = templateutil.unwraphybrid(thing)
    if isinstance(thing, bytes):
        yield thing
    elif isinstance(thing, str):
        # We can only hit this on Python 3, and it's here to guard
        # against infinite recursion.
        raise error.ProgrammingError('Mercurial IO including templates is done'
                                     ' with bytes, not strings, got %r' % thing)
    elif thing is None:
        # None contributes nothing to the stream
        pass
    elif not util.safehasattr(thing, '__iter__'):
        # scalar (e.g. int): render through bytestr
        yield pycompat.bytestr(thing)
    else:
        # the inner loop repeats the scalar cases rather than recursing,
        # so each item is unwrapped exactly once before dispatch; only
        # genuinely iterable items recurse into _flatten
        for i in thing:
            i = templateutil.unwraphybrid(i)
            if isinstance(i, bytes):
                yield i
            elif i is None:
                pass
            elif not util.safehasattr(i, '__iter__'):
                yield pycompat.bytestr(i)
            else:
                for j in _flatten(i):
                    yield j
|
511 | 511 | |
|
def unquotestring(s):
    '''unwrap quotes if any; otherwise returns unmodified string'''
    # only strip when the string is long enough to carry a pair of quotes
    # and starts and ends with the same quote character
    if len(s) >= 2 and s[0] in "'\"" and s[0] == s[-1]:
        return s[1:-1]
    return s
|
517 | 517 | |
|
class engine(object):
    '''template expansion engine.

    template expansion works like this. a map file contains key=value
    pairs. if value is quoted, it is treated as string. otherwise, it
    is treated as name of template file.

    templater is asked to expand a key in map. it looks up key, and
    looks for strings like this: {foo}. it expands {foo} by looking up
    foo in map, and substituting it. expansion is recursive: it stops
    when there is no more {foo} to replace.

    expansion also allows formatting and filtering.

    format uses key to expand each item in list. syntax is
    {key%format}.

    filter uses function to transform value. syntax is
    {key|filter1|filter2|...}.'''

    def __init__(self, loader, filters=None, defaults=None, resources=None,
                 aliases=()):
        # loader: callable(name) -> template text, used by _load()
        self._loader = loader
        if filters is None:
            filters = {}
        self._filters = filters
        self._funcs = templatefuncs.funcs  # make this a parameter if needed
        if defaults is None:
            defaults = {}
        if resources is None:
            resources = {}
        self._defaults = defaults
        self._resources = resources
        self._aliasmap = _aliasrules.buildmap(aliases)
        self._cache = {}  # key: (func, data)

    def symbol(self, mapping, key):
        """Resolve symbol to value or function; None if nothing found"""
        v = None
        # a key reserved as a resource is never read from the user mapping
        if key not in self._resources:
            v = mapping.get(key)
        if v is None:
            v = self._defaults.get(key)
        return v

    def resource(self, mapping, key):
        """Return internal data (e.g. cache) used for keyword/function
        evaluation"""
        v = None
        if key in self._resources:
            # resource entries are callables: (engine, mapping, key) -> value
            v = self._resources[key](self, mapping, key)
        if v is None:
            raise templateutil.ResourceUnavailable(
                _('template resource not available: %s') % key)
        return v

    def _load(self, t):
        '''load, parse, and cache a template'''
        if t not in self._cache:
            # put poison to cut recursion while compiling 't'
            self._cache[t] = (_runrecursivesymbol, t)
            try:
                x = parse(self._loader(t))
                if self._aliasmap:
                    x = _aliasrules.expand(self._aliasmap, x)
                self._cache[t] = compileexp(x, self, methods)
            except:  # re-raises
                # drop the poison entry so a later call can retry cleanly
                del self._cache[t]
                raise
        return self._cache[t]

    def process(self, t, mapping):
        '''Perform expansion. t is name of map element to expand.
        mapping contains added elements for use during expansion. Is a
        generator.'''
        func, data = self._load(t)
        return _flatten(func(self, mapping, data))
|
595 | 595 | |
|
596 | 596 | engines = {'default': engine} |
|
597 | 597 | |
|
def stylelist():
    """Return a comma-separated, sorted string of available style names.

    Scans the first template directory for files named
    'map-cmdline.<style>'. Returns a help message instead if no template
    directories exist.

    Fixes over the previous version: the locals no longer shadow the
    builtin ``file`` or the function's own name, and a file named exactly
    'map-cmdline' (no extension) no longer raises IndexError.
    """
    paths = templatepaths()
    if not paths:
        return _('no templates found, try `hg debuginstall` for more info')
    styles = []
    for filename in os.listdir(paths[0]):
        parts = filename.split(".")
        if parts[-1] in ('orig', 'rej'):
            # skip leftovers from patch/merge operations
            continue
        if parts[0] == "map-cmdline" and len(parts) > 1:
            styles.append(parts[1])
    return ", ".join(sorted(styles))
|
611 | 611 | |
|
def _readmapfile(mapfile):
    """Load template elements from the given map file

    Returns a (cache, tmap, aliases) triple:
    - cache: dict of inline template fragments (quoted values)
    - tmap: dict of name -> (enginename, path) for file-backed templates
    - aliases: list of (name, replacement) alias pairs
    """
    if not os.path.exists(mapfile):
        raise error.Abort(_("style '%s' not found") % mapfile,
                          hint=_("available styles: %s") % stylelist())

    base = os.path.dirname(mapfile)
    conf = config.config(includepaths=templatepaths())
    # entries outside any [section] are treated as [templates]
    conf.read(mapfile, remap={'': 'templates'})

    cache = {}
    tmap = {}
    aliases = []

    val = conf.get('templates', '__base__')
    if val and val[0] not in "'\"":
        # treat as a pointer to a base class for this style
        path = util.normpath(os.path.join(base, val))

        # fallback check in template paths
        if not os.path.exists(path):
            for p in templatepaths():
                # try the value as a map file first, then as a style
                # directory containing a 'map' file
                p2 = util.normpath(os.path.join(p, val))
                if os.path.isfile(p2):
                    path = p2
                    break
                p3 = util.normpath(os.path.join(p2, "map"))
                if os.path.isfile(p3):
                    path = p3
                    break

        # recurse: entries below override what the base style defines
        cache, tmap, aliases = _readmapfile(path)

    for key, val in conf['templates'].items():
        if not val:
            raise error.ParseError(_('missing value'),
                                   conf.source('templates', key))
        if val[0] in "'\"":
            # quoted value: an inline template fragment
            if val[0] != val[-1]:
                raise error.ParseError(_('unmatched quotes'),
                                       conf.source('templates', key))
            cache[key] = unquotestring(val)
        elif key != '__base__':
            # unquoted value: a template file, optionally "engine:path"
            val = 'default', val
            if ':' in val[1]:
                val = val[1].split(':', 1)
            tmap[key] = val[0], os.path.join(base, val[1])
    aliases.extend(conf['templatealias'].items())
    return cache, tmap, aliases
|
661 | 661 | |
|
class templater(object):
    """Main entry point for template expansion.

    Holds preloaded template fragments (``self.cache``), a name ->
    (engine, path) map for file-backed templates (``self.map``), and a
    per-engine-type cache of engine instances (``self.ecache``).
    """

    def __init__(self, filters=None, defaults=None, resources=None,
                 cache=None, aliases=(), minchunk=1024, maxchunk=65536):
        """Create template engine optionally with preloaded template fragments

        - ``filters``: a dict of functions to transform a value into another.
        - ``defaults``: a dict of symbol values/functions; may be overridden
          by a ``mapping`` dict.
        - ``resources``: a dict of functions returning internal data
          (e.g. cache), inaccessible from user template.
        - ``cache``: a dict of preloaded template fragments.
        - ``aliases``: a list of alias (name, replacement) pairs.

        self.cache may be updated later to register additional template
        fragments.
        """
        if filters is None:
            filters = {}
        if defaults is None:
            defaults = {}
        if resources is None:
            resources = {}
        if cache is None:
            cache = {}
        # copy so later updates to self.cache don't mutate the caller's dict
        self.cache = cache.copy()
        self.map = {}
        self.filters = templatefilters.filters.copy()
        self.filters.update(filters)
        self.defaults = defaults
        # 'templ' resource always resolves to this templater instance
        self._resources = {'templ': lambda context, mapping, key: self}
        self._resources.update(resources)
        self._aliases = aliases
        self.minchunk, self.maxchunk = minchunk, maxchunk
        self.ecache = {}

    @classmethod
    def frommapfile(cls, mapfile, filters=None, defaults=None, resources=None,
                    cache=None, minchunk=1024, maxchunk=65536):
        """Create templater from the specified map file"""
        t = cls(filters, defaults, resources, cache, [], minchunk, maxchunk)
        cache, tmap, aliases = _readmapfile(mapfile)
        t.cache.update(cache)
        t.map = tmap
        t._aliases = aliases
        return t

    def __contains__(self, key):
        return key in self.cache or key in self.map

    def load(self, t):
        '''Get the template for the given template name. Use a local cache.'''
        if t not in self.cache:
            try:
                self.cache[t] = util.readfile(self.map[t][1])
            except KeyError as inst:
                # unknown template name
                raise templateutil.TemplateNotFound(
                    _('"%s" not in template map') % inst.args[0])
            except IOError as inst:
                # re-raise with the template file path in the message
                reason = (_('template file %s: %s')
                          % (self.map[t][1], util.forcebytestr(inst.args[1])))
                raise IOError(inst.args[0], encoding.strfromlocal(reason))
        return self.cache[t]

    def renderdefault(self, mapping):
        """Render the default unnamed template and return result as string"""
        return self.render('', mapping)

    def render(self, t, mapping):
        """Render the specified named template and return result as string"""
        mapping = pycompat.strkwargs(mapping)
        return templateutil.stringify(self(t, **mapping))

    def __call__(self, t, **mapping):
        # returns a generator of byte chunks
        mapping = pycompat.byteskwargs(mapping)
        ttype = t in self.map and self.map[t][0] or 'default'
        if ttype not in self.ecache:
            # lazily instantiate one engine per engine type
            try:
                ecls = engines[ttype]
            except KeyError:
                raise error.Abort(_('invalid template engine: %s') % ttype)
            self.ecache[ttype] = ecls(self.load, self.filters, self.defaults,
                                      self._resources, self._aliases)
        proc = self.ecache[ttype]

        stream = proc.process(t, mapping)
        if self.minchunk:
            # coalesce tiny yields into progressively larger chunks
            stream = util.increasingchunks(stream, min=self.minchunk,
                                           max=self.maxchunk)
        return stream
|
748 | 752 | |
|
def templatepaths():
    '''return locations used for template files.'''
    relative = ['templates']
    candidates = [os.path.normpath(os.path.join(util.datapath, rel))
                  for rel in relative]
    # keep only directories that actually exist
    return [path for path in candidates if os.path.isdir(path)]
|
755 | 759 | |
|
def templatepath(name):
    '''return location of template file. returns None if not found.'''
    for directory in templatepaths():
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return None
|
763 | 767 | |
|
def stylemap(styles, paths=None):
    """Return path to mapfile for a given style.

    Searches mapfile in the following locations:
    1. templatepath/style/map
    2. templatepath/map-style
    3. templatepath/map
    """

    if paths is None:
        paths = templatepaths()
    elif isinstance(paths, bytes):
        paths = [paths]

    if isinstance(styles, bytes):
        styles = [styles]

    for style in styles:
        # only plain names are allowed, to honor template paths: reject
        # empty names, '.'/'..', and anything containing a path separator
        if not style:
            continue
        if style in (pycompat.oscurdir, pycompat.ospardir):
            continue
        if pycompat.ossep in style:
            continue
        if pycompat.osaltsep and pycompat.osaltsep in style:
            continue

        candidates = [os.path.join(style, 'map'), 'map-' + style, 'map']
        for path in paths:
            for location in candidates:
                mapfile = os.path.join(path, location)
                if os.path.isfile(mapfile):
                    return style, mapfile

    raise RuntimeError("No hgweb templates found in %r" % paths)
General Comments 0
You need to be logged in to leave comments.
Login now