@@ -1,453 +1,454 b'' | |||
|
1 | 1 | # hgweb/hgweb_mod.py - Web interface for a repository. |
|
2 | 2 | # |
|
3 | 3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
4 | 4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
5 | 5 | # |
|
6 | 6 | # This software may be used and distributed according to the terms of the |
|
7 | 7 | # GNU General Public License version 2 or any later version. |
|
8 | 8 | |
|
9 | 9 | from __future__ import absolute_import |
|
10 | 10 | |
|
11 | 11 | import contextlib |
|
12 | 12 | import os |
|
13 | 13 | |
|
14 | 14 | from .common import ( |
|
15 | 15 | ErrorResponse, |
|
16 | 16 | HTTP_BAD_REQUEST, |
|
17 | 17 | cspvalues, |
|
18 | 18 | permhooks, |
|
19 | 19 | statusmessage, |
|
20 | 20 | ) |
|
21 | 21 | |
|
22 | 22 | from .. import ( |
|
23 | 23 | encoding, |
|
24 | 24 | error, |
|
25 | 25 | formatter, |
|
26 | 26 | hg, |
|
27 | 27 | hook, |
|
28 | 28 | profiling, |
|
29 | 29 | pycompat, |
|
30 | 30 | repoview, |
|
31 | 31 | templatefilters, |
|
32 | 32 | templater, |
|
33 | 33 | ui as uimod, |
|
34 | 34 | util, |
|
35 | 35 | wireprotoserver, |
|
36 | 36 | ) |
|
37 | 37 | |
|
38 | 38 | from . import ( |
|
39 | 39 | request as requestmod, |
|
40 | 40 | webcommands, |
|
41 | 41 | webutil, |
|
42 | 42 | wsgicgi, |
|
43 | 43 | ) |
|
44 | 44 | |
|
45 | 45 | archivespecs = util.sortdict(( |
|
46 | 46 | ('zip', ('application/zip', 'zip', '.zip', None)), |
|
47 | 47 | ('gz', ('application/x-gzip', 'tgz', '.tar.gz', None)), |
|
48 | 48 | ('bz2', ('application/x-bzip2', 'tbz2', '.tar.bz2', None)), |
|
49 | 49 | )) |
|
50 | 50 | |
|
51 | 51 | def getstyle(req, configfn, templatepath): |
|
52 | 52 | styles = ( |
|
53 | 53 | req.qsparams.get('style', None), |
|
54 | 54 | configfn('web', 'style'), |
|
55 | 55 | 'paper', |
|
56 | 56 | ) |
|
57 | 57 | return styles, templater.stylemap(styles, templatepath) |
|
58 | 58 | |
|
59 | 59 | def makebreadcrumb(url, prefix=''): |
|
60 | 60 | '''Return a 'URL breadcrumb' list |
|
61 | 61 | |
|
62 | 62 | A 'URL breadcrumb' is a list of URL-name pairs, |
|
63 | 63 | corresponding to each of the path items on a URL. |
|
64 | 64 | This can be used to create path navigation entries. |
|
65 | 65 | ''' |
|
66 | 66 | if url.endswith('/'): |
|
67 | 67 | url = url[:-1] |
|
68 | 68 | if prefix: |
|
69 | 69 | url = '/' + prefix + url |
|
70 | 70 | relpath = url |
|
71 | 71 | if relpath.startswith('/'): |
|
72 | 72 | relpath = relpath[1:] |
|
73 | 73 | |
|
74 | 74 | breadcrumb = [] |
|
75 | 75 | urlel = url |
|
76 | 76 | pathitems = [''] + relpath.split('/') |
|
77 | 77 | for pathel in reversed(pathitems): |
|
78 | 78 | if not pathel or not urlel: |
|
79 | 79 | break |
|
80 | 80 | breadcrumb.append({'url': urlel, 'name': pathel}) |
|
81 | 81 | urlel = os.path.dirname(urlel) |
|
82 | 82 | return reversed(breadcrumb) |
|
83 | 83 | |
|
84 | 84 | class requestcontext(object): |
|
85 | 85 | """Holds state/context for an individual request. |
|
86 | 86 | |
|
87 | 87 | Servers can be multi-threaded. Holding state on the WSGI application |
|
88 | 88 | is prone to race conditions. Instances of this class exist to hold |
|
89 | 89 | mutable and race-free state for requests. |
|
90 | 90 | """ |
|
91 | 91 | def __init__(self, app, repo, req, res): |
|
92 | 92 | self.repo = repo |
|
93 | 93 | self.reponame = app.reponame |
|
94 | 94 | self.req = req |
|
95 | 95 | self.res = res |
|
96 | 96 | |
|
97 | 97 | self.archivespecs = archivespecs |
|
98 | 98 | |
|
99 | 99 | self.maxchanges = self.configint('web', 'maxchanges') |
|
100 | 100 | self.stripecount = self.configint('web', 'stripes') |
|
101 | 101 | self.maxshortchanges = self.configint('web', 'maxshortchanges') |
|
102 | 102 | self.maxfiles = self.configint('web', 'maxfiles') |
|
103 | 103 | self.allowpull = self.configbool('web', 'allow-pull') |
|
104 | 104 | |
|
105 | 105 | # we use untrusted=False to prevent a repo owner from using |
|
106 | 106 | # web.templates in .hg/hgrc to get access to any file readable |
|
107 | 107 | # by the user running the CGI script |
|
108 | 108 | self.templatepath = self.config('web', 'templates', untrusted=False) |
|
109 | 109 | |
|
110 | 110 | # This object is more expensive to build than simple config values. |
|
111 | 111 | # It is shared across requests. The app will replace the object |
|
112 | 112 | # if it is updated. Since this is a reference and nothing should |
|
113 | 113 | # modify the underlying object, it should be constant for the lifetime |
|
114 | 114 | # of the request. |
|
115 | 115 | self.websubtable = app.websubtable |
|
116 | 116 | |
|
117 | 117 | self.csp, self.nonce = cspvalues(self.repo.ui) |
|
118 | 118 | |
|
119 | 119 | # Trust the settings from the .hg/hgrc files by default. |
|
120 | 120 | def config(self, section, name, default=uimod._unset, untrusted=True): |
|
121 | 121 | return self.repo.ui.config(section, name, default, |
|
122 | 122 | untrusted=untrusted) |
|
123 | 123 | |
|
124 | 124 | def configbool(self, section, name, default=uimod._unset, untrusted=True): |
|
125 | 125 | return self.repo.ui.configbool(section, name, default, |
|
126 | 126 | untrusted=untrusted) |
|
127 | 127 | |
|
128 | 128 | def configint(self, section, name, default=uimod._unset, untrusted=True): |
|
129 | 129 | return self.repo.ui.configint(section, name, default, |
|
130 | 130 | untrusted=untrusted) |
|
131 | 131 | |
|
132 | 132 | def configlist(self, section, name, default=uimod._unset, untrusted=True): |
|
133 | 133 | return self.repo.ui.configlist(section, name, default, |
|
134 | 134 | untrusted=untrusted) |
|
135 | 135 | |
|
136 | 136 | def archivelist(self, nodeid): |
|
137 | 137 | allowed = self.configlist('web', 'allow_archive') |
|
138 | 138 | for typ, spec in self.archivespecs.iteritems(): |
|
139 | 139 | if typ in allowed or self.configbool('web', 'allow%s' % typ): |
|
140 | 140 | yield {'type': typ, 'extension': spec[2], 'node': nodeid} |
|
141 | 141 | |
|
142 | 142 | def templater(self, req): |
|
143 | 143 | # determine scheme, port and server name |
|
144 | 144 | # this is needed to create absolute urls |
|
145 | 145 | logourl = self.config('web', 'logourl') |
|
146 | 146 | logoimg = self.config('web', 'logoimg') |
|
147 | 147 | staticurl = (self.config('web', 'staticurl') |
|
148 | 148 | or req.apppath + '/static/') |
|
149 | 149 | if not staticurl.endswith('/'): |
|
150 | 150 | staticurl += '/' |
|
151 | 151 | |
|
152 | 152 | # some functions for the templater |
|
153 | 153 | |
|
154 | 154 | def motd(**map): |
|
155 | 155 | yield self.config('web', 'motd') |
|
156 | 156 | |
|
157 | 157 | # figure out which style to use |
|
158 | 158 | |
|
159 | 159 | vars = {} |
|
160 | 160 | styles, (style, mapfile) = getstyle(req, self.config, |
|
161 | 161 | self.templatepath) |
|
162 | 162 | if style == styles[0]: |
|
163 | 163 | vars['style'] = style |
|
164 | 164 | |
|
165 | 165 | sessionvars = webutil.sessionvars(vars, '?') |
|
166 | 166 | |
|
167 | 167 | if not self.reponame: |
|
168 | 168 | self.reponame = (self.config('web', 'name', '') |
|
169 | 169 | or req.reponame |
|
170 | 170 | or req.apppath |
|
171 | 171 | or self.repo.root) |
|
172 | 172 | |
|
173 | 173 | def websubfilter(text): |
|
174 | 174 | return templatefilters.websub(text, self.websubtable) |
|
175 | 175 | |
|
176 | 176 | # create the templater |
|
177 | 177 | # TODO: export all keywords: defaults = templatekw.keywords.copy() |
|
178 | 178 | defaults = { |
|
179 | 179 | 'url': req.apppath + '/', |
|
180 | 180 | 'logourl': logourl, |
|
181 | 181 | 'logoimg': logoimg, |
|
182 | 182 | 'staticurl': staticurl, |
|
183 | 183 | 'urlbase': req.advertisedbaseurl, |
|
184 | 184 | 'repo': self.reponame, |
|
185 | 185 | 'encoding': encoding.encoding, |
|
186 | 186 | 'motd': motd, |
|
187 | 187 | 'sessionvars': sessionvars, |
|
188 | 188 | 'pathdef': makebreadcrumb(req.apppath), |
|
189 | 189 | 'style': style, |
|
190 | 190 | 'nonce': self.nonce, |
|
191 | 191 | } |
|
192 | 192 | tres = formatter.templateresources(self.repo.ui, self.repo) |
|
193 | 193 | tmpl = templater.templater.frommapfile(mapfile, |
|
194 | 194 | filters={'websub': websubfilter}, |
|
195 | 195 | defaults=defaults, |
|
196 | 196 | resources=tres) |
|
197 | 197 | return tmpl |
|
198 | 198 | |
|
199 | 199 | def sendtemplate(self, name, **kwargs): |
|
200 | 200 | """Helper function to send a response generated from a template.""" |
|
201 | self.res.setbodygen(self.tmpl(name, **kwargs)) | |
|
201 | kwargs = pycompat.byteskwargs(kwargs) | |
|
202 | self.res.setbodygen(self.tmpl.generate(name, kwargs)) | |
|
202 | 203 | return self.res.sendresponse() |
|
203 | 204 | |
|
204 | 205 | class hgweb(object): |
|
205 | 206 | """HTTP server for individual repositories. |
|
206 | 207 | |
|
207 | 208 | Instances of this class serve HTTP responses for a particular |
|
208 | 209 | repository. |
|
209 | 210 | |
|
210 | 211 | Instances are typically used as WSGI applications. |
|
211 | 212 | |
|
212 | 213 | Some servers are multi-threaded. On these servers, there may |
|
213 | 214 | be multiple active threads inside __call__. |
|
214 | 215 | """ |
|
215 | 216 | def __init__(self, repo, name=None, baseui=None): |
|
216 | 217 | if isinstance(repo, str): |
|
217 | 218 | if baseui: |
|
218 | 219 | u = baseui.copy() |
|
219 | 220 | else: |
|
220 | 221 | u = uimod.ui.load() |
|
221 | 222 | r = hg.repository(u, repo) |
|
222 | 223 | else: |
|
223 | 224 | # we trust caller to give us a private copy |
|
224 | 225 | r = repo |
|
225 | 226 | |
|
226 | 227 | r.ui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') |
|
227 | 228 | r.baseui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') |
|
228 | 229 | r.ui.setconfig('ui', 'nontty', 'true', 'hgweb') |
|
229 | 230 | r.baseui.setconfig('ui', 'nontty', 'true', 'hgweb') |
|
230 | 231 | # resolve file patterns relative to repo root |
|
231 | 232 | r.ui.setconfig('ui', 'forcecwd', r.root, 'hgweb') |
|
232 | 233 | r.baseui.setconfig('ui', 'forcecwd', r.root, 'hgweb') |
|
233 | 234 | # displaying bundling progress bar while serving feel wrong and may |
|
234 | 235 | # break some wsgi implementation. |
|
235 | 236 | r.ui.setconfig('progress', 'disable', 'true', 'hgweb') |
|
236 | 237 | r.baseui.setconfig('progress', 'disable', 'true', 'hgweb') |
|
237 | 238 | self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))] |
|
238 | 239 | self._lastrepo = self._repos[0] |
|
239 | 240 | hook.redirect(True) |
|
240 | 241 | self.reponame = name |
|
241 | 242 | |
|
242 | 243 | def _webifyrepo(self, repo): |
|
243 | 244 | repo = getwebview(repo) |
|
244 | 245 | self.websubtable = webutil.getwebsubs(repo) |
|
245 | 246 | return repo |
|
246 | 247 | |
|
247 | 248 | @contextlib.contextmanager |
|
248 | 249 | def _obtainrepo(self): |
|
249 | 250 | """Obtain a repo unique to the caller. |
|
250 | 251 | |
|
251 | 252 | Internally we maintain a stack of cachedlocalrepo instances |
|
252 | 253 | to be handed out. If one is available, we pop it and return it, |
|
253 | 254 | ensuring it is up to date in the process. If one is not available, |
|
254 | 255 | we clone the most recently used repo instance and return it. |
|
255 | 256 | |
|
256 | 257 | It is currently possible for the stack to grow without bounds |
|
257 | 258 | if the server allows infinite threads. However, servers should |
|
258 | 259 | have a thread limit, thus establishing our limit. |
|
259 | 260 | """ |
|
260 | 261 | if self._repos: |
|
261 | 262 | cached = self._repos.pop() |
|
262 | 263 | r, created = cached.fetch() |
|
263 | 264 | else: |
|
264 | 265 | cached = self._lastrepo.copy() |
|
265 | 266 | r, created = cached.fetch() |
|
266 | 267 | if created: |
|
267 | 268 | r = self._webifyrepo(r) |
|
268 | 269 | |
|
269 | 270 | self._lastrepo = cached |
|
270 | 271 | self.mtime = cached.mtime |
|
271 | 272 | try: |
|
272 | 273 | yield r |
|
273 | 274 | finally: |
|
274 | 275 | self._repos.append(cached) |
|
275 | 276 | |
|
276 | 277 | def run(self): |
|
277 | 278 | """Start a server from CGI environment. |
|
278 | 279 | |
|
279 | 280 | Modern servers should be using WSGI and should avoid this |
|
280 | 281 | method, if possible. |
|
281 | 282 | """ |
|
282 | 283 | if not encoding.environ.get('GATEWAY_INTERFACE', |
|
283 | 284 | '').startswith("CGI/1."): |
|
284 | 285 | raise RuntimeError("This function is only intended to be " |
|
285 | 286 | "called while running as a CGI script.") |
|
286 | 287 | wsgicgi.launch(self) |
|
287 | 288 | |
|
288 | 289 | def __call__(self, env, respond): |
|
289 | 290 | """Run the WSGI application. |
|
290 | 291 | |
|
291 | 292 | This may be called by multiple threads. |
|
292 | 293 | """ |
|
293 | 294 | req = requestmod.parserequestfromenv(env) |
|
294 | 295 | res = requestmod.wsgiresponse(req, respond) |
|
295 | 296 | |
|
296 | 297 | return self.run_wsgi(req, res) |
|
297 | 298 | |
|
298 | 299 | def run_wsgi(self, req, res): |
|
299 | 300 | """Internal method to run the WSGI application. |
|
300 | 301 | |
|
301 | 302 | This is typically only called by Mercurial. External consumers |
|
302 | 303 | should be using instances of this class as the WSGI application. |
|
303 | 304 | """ |
|
304 | 305 | with self._obtainrepo() as repo: |
|
305 | 306 | profile = repo.ui.configbool('profiling', 'enabled') |
|
306 | 307 | with profiling.profile(repo.ui, enabled=profile): |
|
307 | 308 | for r in self._runwsgi(req, res, repo): |
|
308 | 309 | yield r |
|
309 | 310 | |
|
310 | 311 | def _runwsgi(self, req, res, repo): |
|
311 | 312 | rctx = requestcontext(self, repo, req, res) |
|
312 | 313 | |
|
313 | 314 | # This state is global across all threads. |
|
314 | 315 | encoding.encoding = rctx.config('web', 'encoding') |
|
315 | 316 | rctx.repo.ui.environ = req.rawenv |
|
316 | 317 | |
|
317 | 318 | if rctx.csp: |
|
318 | 319 | # hgwebdir may have added CSP header. Since we generate our own, |
|
319 | 320 | # replace it. |
|
320 | 321 | res.headers['Content-Security-Policy'] = rctx.csp |
|
321 | 322 | |
|
322 | 323 | handled = wireprotoserver.handlewsgirequest( |
|
323 | 324 | rctx, req, res, self.check_perm) |
|
324 | 325 | if handled: |
|
325 | 326 | return res.sendresponse() |
|
326 | 327 | |
|
327 | 328 | # Old implementations of hgweb supported dispatching the request via |
|
328 | 329 | # the initial query string parameter instead of using PATH_INFO. |
|
329 | 330 | # If PATH_INFO is present (signaled by ``req.dispatchpath`` having |
|
330 | 331 | # a value), we use it. Otherwise fall back to the query string. |
|
331 | 332 | if req.dispatchpath is not None: |
|
332 | 333 | query = req.dispatchpath |
|
333 | 334 | else: |
|
334 | 335 | query = req.querystring.partition('&')[0].partition(';')[0] |
|
335 | 336 | |
|
336 | 337 | # translate user-visible url structure to internal structure |
|
337 | 338 | |
|
338 | 339 | args = query.split('/', 2) |
|
339 | 340 | if 'cmd' not in req.qsparams and args and args[0]: |
|
340 | 341 | cmd = args.pop(0) |
|
341 | 342 | style = cmd.rfind('-') |
|
342 | 343 | if style != -1: |
|
343 | 344 | req.qsparams['style'] = cmd[:style] |
|
344 | 345 | cmd = cmd[style + 1:] |
|
345 | 346 | |
|
346 | 347 | # avoid accepting e.g. style parameter as command |
|
347 | 348 | if util.safehasattr(webcommands, cmd): |
|
348 | 349 | req.qsparams['cmd'] = cmd |
|
349 | 350 | |
|
350 | 351 | if cmd == 'static': |
|
351 | 352 | req.qsparams['file'] = '/'.join(args) |
|
352 | 353 | else: |
|
353 | 354 | if args and args[0]: |
|
354 | 355 | node = args.pop(0).replace('%2F', '/') |
|
355 | 356 | req.qsparams['node'] = node |
|
356 | 357 | if args: |
|
357 | 358 | if 'file' in req.qsparams: |
|
358 | 359 | del req.qsparams['file'] |
|
359 | 360 | for a in args: |
|
360 | 361 | req.qsparams.add('file', a) |
|
361 | 362 | |
|
362 | 363 | ua = req.headers.get('User-Agent', '') |
|
363 | 364 | if cmd == 'rev' and 'mercurial' in ua: |
|
364 | 365 | req.qsparams['style'] = 'raw' |
|
365 | 366 | |
|
366 | 367 | if cmd == 'archive': |
|
367 | 368 | fn = req.qsparams['node'] |
|
368 | 369 | for type_, spec in rctx.archivespecs.iteritems(): |
|
369 | 370 | ext = spec[2] |
|
370 | 371 | if fn.endswith(ext): |
|
371 | 372 | req.qsparams['node'] = fn[:-len(ext)] |
|
372 | 373 | req.qsparams['type'] = type_ |
|
373 | 374 | else: |
|
374 | 375 | cmd = req.qsparams.get('cmd', '') |
|
375 | 376 | |
|
376 | 377 | # process the web interface request |
|
377 | 378 | |
|
378 | 379 | try: |
|
379 | 380 | rctx.tmpl = rctx.templater(req) |
|
380 | 381 | ctype = rctx.tmpl.render('mimetype', |
|
381 | 382 | {'encoding': encoding.encoding}) |
|
382 | 383 | |
|
383 | 384 | # check read permissions non-static content |
|
384 | 385 | if cmd != 'static': |
|
385 | 386 | self.check_perm(rctx, req, None) |
|
386 | 387 | |
|
387 | 388 | if cmd == '': |
|
388 | 389 | req.qsparams['cmd'] = rctx.tmpl.cache['default'] |
|
389 | 390 | cmd = req.qsparams['cmd'] |
|
390 | 391 | |
|
391 | 392 | # Don't enable caching if using a CSP nonce because then it wouldn't |
|
392 | 393 | # be a nonce. |
|
393 | 394 | if rctx.configbool('web', 'cache') and not rctx.nonce: |
|
394 | 395 | tag = 'W/"%d"' % self.mtime |
|
395 | 396 | if req.headers.get('If-None-Match') == tag: |
|
396 | 397 | res.status = '304 Not Modified' |
|
397 | 398 | # Response body not allowed on 304. |
|
398 | 399 | res.setbodybytes('') |
|
399 | 400 | return res.sendresponse() |
|
400 | 401 | |
|
401 | 402 | res.headers['ETag'] = tag |
|
402 | 403 | |
|
403 | 404 | if cmd not in webcommands.__all__: |
|
404 | 405 | msg = 'no such method: %s' % cmd |
|
405 | 406 | raise ErrorResponse(HTTP_BAD_REQUEST, msg) |
|
406 | 407 | else: |
|
407 | 408 | # Set some globals appropriate for web handlers. Commands can |
|
408 | 409 | # override easily enough. |
|
409 | 410 | res.status = '200 Script output follows' |
|
410 | 411 | res.headers['Content-Type'] = ctype |
|
411 | 412 | return getattr(webcommands, cmd)(rctx) |
|
412 | 413 | |
|
413 | 414 | except (error.LookupError, error.RepoLookupError) as err: |
|
414 | 415 | msg = pycompat.bytestr(err) |
|
415 | 416 | if (util.safehasattr(err, 'name') and |
|
416 | 417 | not isinstance(err, error.ManifestLookupError)): |
|
417 | 418 | msg = 'revision not found: %s' % err.name |
|
418 | 419 | |
|
419 | 420 | res.status = '404 Not Found' |
|
420 | 421 | res.headers['Content-Type'] = ctype |
|
421 | 422 | return rctx.sendtemplate('error', error=msg) |
|
422 | 423 | except (error.RepoError, error.RevlogError) as e: |
|
423 | 424 | res.status = '500 Internal Server Error' |
|
424 | 425 | res.headers['Content-Type'] = ctype |
|
425 | 426 | return rctx.sendtemplate('error', error=pycompat.bytestr(e)) |
|
426 | 427 | except ErrorResponse as e: |
|
427 | 428 | res.status = statusmessage(e.code, pycompat.bytestr(e)) |
|
428 | 429 | res.headers['Content-Type'] = ctype |
|
429 | 430 | return rctx.sendtemplate('error', error=pycompat.bytestr(e)) |
|
430 | 431 | |
|
431 | 432 | def check_perm(self, rctx, req, op): |
|
432 | 433 | for permhook in permhooks: |
|
433 | 434 | permhook(rctx, req, op) |
|
434 | 435 | |
|
435 | 436 | def getwebview(repo): |
|
436 | 437 | """The 'web.view' config controls changeset filter to hgweb. Possible |
|
437 | 438 | values are ``served``, ``visible`` and ``all``. Default is ``served``. |
|
438 | 439 | The ``served`` filter only shows changesets that can be pulled from the |
|
439 | 440 | hgweb instance. The``visible`` filter includes secret changesets but |
|
440 | 441 | still excludes "hidden" one. |
|
441 | 442 | |
|
442 | 443 | See the repoview module for details. |
|
443 | 444 | |
|
444 | 445 | The option has been around undocumented since Mercurial 2.5, but no |
|
445 | 446 | user ever asked about it. So we better keep it undocumented for now.""" |
|
446 | 447 | # experimental config: web.view |
|
447 | 448 | viewconfig = repo.ui.config('web', 'view', untrusted=True) |
|
448 | 449 | if viewconfig == 'all': |
|
449 | 450 | return repo.unfiltered() |
|
450 | 451 | elif viewconfig in repoview.filtertable: |
|
451 | 452 | return repo.filtered(viewconfig) |
|
452 | 453 | else: |
|
453 | 454 | return repo.filtered('served') |
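The sendtemplate() hunk in this file replaces a direct templater call taking keyword arguments with an explicit bytes-keyed mapping (built by pycompat.byteskwargs) handed to templater.generate(). The standalone sketch below only illustrates the shape of that new calling convention; byteskwargs and the toy templater here are simplified stand-ins, not Mercurial's real implementations.

    def byteskwargs(kwargs):
        # stand-in for pycompat.byteskwargs: **kwargs keys arrive as str on
        # Python 3, while the templater wants bytes keys
        return {k.encode('ascii'): v for k, v in kwargs.items()}

    class toytemplater(object):
        # stand-in for templater.templater; generate() yields output chunks
        def generate(self, name, mapping):
            yield name + b': ' + b', '.join(sorted(mapping))

    def sendtemplate(tmpl, name, **kwargs):
        # the pattern introduced by the patch: build an explicit mapping,
        # then ask the templater to generate() from it
        mapping = byteskwargs(kwargs)
        return tmpl.generate(name, mapping)

    for chunk in sendtemplate(toytemplater(), b'error', error=b'not found'):
        print(chunk)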
@@ -1,536 +1,536 b'' | |||
|
1 | 1 | # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories. |
|
2 | 2 | # |
|
3 | 3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
4 | 4 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
5 | 5 | # |
|
6 | 6 | # This software may be used and distributed according to the terms of the |
|
7 | 7 | # GNU General Public License version 2 or any later version. |
|
8 | 8 | |
|
9 | 9 | from __future__ import absolute_import |
|
10 | 10 | |
|
11 | 11 | import gc |
|
12 | 12 | import os |
|
13 | 13 | import time |
|
14 | 14 | |
|
15 | 15 | from ..i18n import _ |
|
16 | 16 | |
|
17 | 17 | from .common import ( |
|
18 | 18 | ErrorResponse, |
|
19 | 19 | HTTP_SERVER_ERROR, |
|
20 | 20 | cspvalues, |
|
21 | 21 | get_contact, |
|
22 | 22 | get_mtime, |
|
23 | 23 | ismember, |
|
24 | 24 | paritygen, |
|
25 | 25 | staticfile, |
|
26 | 26 | statusmessage, |
|
27 | 27 | ) |
|
28 | 28 | |
|
29 | 29 | from .. import ( |
|
30 | 30 | configitems, |
|
31 | 31 | encoding, |
|
32 | 32 | error, |
|
33 | 33 | hg, |
|
34 | 34 | profiling, |
|
35 | 35 | pycompat, |
|
36 | 36 | scmutil, |
|
37 | 37 | templater, |
|
38 | 38 | ui as uimod, |
|
39 | 39 | util, |
|
40 | 40 | ) |
|
41 | 41 | |
|
42 | 42 | from . import ( |
|
43 | 43 | hgweb_mod, |
|
44 | 44 | request as requestmod, |
|
45 | 45 | webutil, |
|
46 | 46 | wsgicgi, |
|
47 | 47 | ) |
|
48 | 48 | from ..utils import dateutil |
|
49 | 49 | |
|
50 | 50 | def cleannames(items): |
|
51 | 51 | return [(util.pconvert(name).strip('/'), path) for name, path in items] |
|
52 | 52 | |
|
53 | 53 | def findrepos(paths): |
|
54 | 54 | repos = [] |
|
55 | 55 | for prefix, root in cleannames(paths): |
|
56 | 56 | roothead, roottail = os.path.split(root) |
|
57 | 57 | # "foo = /bar/*" or "foo = /bar/**" lets every repo /bar/N in or below |
|
58 | 58 | # /bar/ be served as as foo/N . |
|
59 | 59 | # '*' will not search inside dirs with .hg (except .hg/patches), |
|
60 | 60 | # '**' will search inside dirs with .hg (and thus also find subrepos). |
|
61 | 61 | try: |
|
62 | 62 | recurse = {'*': False, '**': True}[roottail] |
|
63 | 63 | except KeyError: |
|
64 | 64 | repos.append((prefix, root)) |
|
65 | 65 | continue |
|
66 | 66 | roothead = os.path.normpath(os.path.abspath(roothead)) |
|
67 | 67 | paths = scmutil.walkrepos(roothead, followsym=True, recurse=recurse) |
|
68 | 68 | repos.extend(urlrepos(prefix, roothead, paths)) |
|
69 | 69 | return repos |
|
70 | 70 | |
|
71 | 71 | def urlrepos(prefix, roothead, paths): |
|
72 | 72 | """yield url paths and filesystem paths from a list of repo paths |
|
73 | 73 | |
|
74 | 74 | >>> conv = lambda seq: [(v, util.pconvert(p)) for v,p in seq] |
|
75 | 75 | >>> conv(urlrepos(b'hg', b'/opt', [b'/opt/r', b'/opt/r/r', b'/opt'])) |
|
76 | 76 | [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')] |
|
77 | 77 | >>> conv(urlrepos(b'', b'/opt', [b'/opt/r', b'/opt/r/r', b'/opt'])) |
|
78 | 78 | [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')] |
|
79 | 79 | """ |
|
80 | 80 | for path in paths: |
|
81 | 81 | path = os.path.normpath(path) |
|
82 | 82 | yield (prefix + '/' + |
|
83 | 83 | util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path |
|
84 | 84 | |
|
85 | 85 | def readallowed(ui, req): |
|
86 | 86 | """Check allow_read and deny_read config options of a repo's ui object |
|
87 | 87 | to determine user permissions. By default, with neither option set (or |
|
88 | 88 | both empty), allow all users to read the repo. There are two ways a |
|
89 | 89 | user can be denied read access: (1) deny_read is not empty, and the |
|
90 | 90 | user is unauthenticated or deny_read contains user (or *), and (2) |
|
91 | 91 | allow_read is not empty and the user is not in allow_read. Return True |
|
92 | 92 | if user is allowed to read the repo, else return False.""" |
|
93 | 93 | |
|
94 | 94 | user = req.remoteuser |
|
95 | 95 | |
|
96 | 96 | deny_read = ui.configlist('web', 'deny_read', untrusted=True) |
|
97 | 97 | if deny_read and (not user or ismember(ui, user, deny_read)): |
|
98 | 98 | return False |
|
99 | 99 | |
|
100 | 100 | allow_read = ui.configlist('web', 'allow_read', untrusted=True) |
|
101 | 101 | # by default, allow reading if no allow_read option has been set |
|
102 | 102 | if not allow_read or ismember(ui, user, allow_read): |
|
103 | 103 | return True |
|
104 | 104 | |
|
105 | 105 | return False |
|
106 | 106 | |
|
107 | 107 | def archivelist(ui, nodeid, url): |
|
108 | 108 | allowed = ui.configlist('web', 'allow_archive', untrusted=True) |
|
109 | 109 | archives = [] |
|
110 | 110 | |
|
111 | 111 | for typ, spec in hgweb_mod.archivespecs.iteritems(): |
|
112 | 112 | if typ in allowed or ui.configbool('web', 'allow' + typ, |
|
113 | 113 | untrusted=True): |
|
114 | 114 | archives.append({ |
|
115 | 115 | 'type': typ, |
|
116 | 116 | 'extension': spec[2], |
|
117 | 117 | 'node': nodeid, |
|
118 | 118 | 'url': url, |
|
119 | 119 | }) |
|
120 | 120 | |
|
121 | 121 | return archives |
|
122 | 122 | |
|
123 | 123 | def rawindexentries(ui, repos, req, subdir=''): |
|
124 | 124 | descend = ui.configbool('web', 'descend') |
|
125 | 125 | collapse = ui.configbool('web', 'collapse') |
|
126 | 126 | seenrepos = set() |
|
127 | 127 | seendirs = set() |
|
128 | 128 | for name, path in repos: |
|
129 | 129 | |
|
130 | 130 | if not name.startswith(subdir): |
|
131 | 131 | continue |
|
132 | 132 | name = name[len(subdir):] |
|
133 | 133 | directory = False |
|
134 | 134 | |
|
135 | 135 | if '/' in name: |
|
136 | 136 | if not descend: |
|
137 | 137 | continue |
|
138 | 138 | |
|
139 | 139 | nameparts = name.split('/') |
|
140 | 140 | rootname = nameparts[0] |
|
141 | 141 | |
|
142 | 142 | if not collapse: |
|
143 | 143 | pass |
|
144 | 144 | elif rootname in seendirs: |
|
145 | 145 | continue |
|
146 | 146 | elif rootname in seenrepos: |
|
147 | 147 | pass |
|
148 | 148 | else: |
|
149 | 149 | directory = True |
|
150 | 150 | name = rootname |
|
151 | 151 | |
|
152 | 152 | # redefine the path to refer to the directory |
|
153 | 153 | discarded = '/'.join(nameparts[1:]) |
|
154 | 154 | |
|
155 | 155 | # remove name parts plus accompanying slash |
|
156 | 156 | path = path[:-len(discarded) - 1] |
|
157 | 157 | |
|
158 | 158 | try: |
|
159 | 159 | r = hg.repository(ui, path) |
|
160 | 160 | directory = False |
|
161 | 161 | except (IOError, error.RepoError): |
|
162 | 162 | pass |
|
163 | 163 | |
|
164 | 164 | parts = [ |
|
165 | 165 | req.apppath.strip('/'), |
|
166 | 166 | subdir.strip('/'), |
|
167 | 167 | name.strip('/'), |
|
168 | 168 | ] |
|
169 | 169 | url = '/' + '/'.join(p for p in parts if p) + '/' |
|
170 | 170 | |
|
171 | 171 | # show either a directory entry or a repository |
|
172 | 172 | if directory: |
|
173 | 173 | # get the directory's time information |
|
174 | 174 | try: |
|
175 | 175 | d = (get_mtime(path), dateutil.makedate()[1]) |
|
176 | 176 | except OSError: |
|
177 | 177 | continue |
|
178 | 178 | |
|
179 | 179 | # add '/' to the name to make it obvious that |
|
180 | 180 | # the entry is a directory, not a regular repository |
|
181 | 181 | row = {'contact': "", |
|
182 | 182 | 'contact_sort': "", |
|
183 | 183 | 'name': name + '/', |
|
184 | 184 | 'name_sort': name, |
|
185 | 185 | 'url': url, |
|
186 | 186 | 'description': "", |
|
187 | 187 | 'description_sort': "", |
|
188 | 188 | 'lastchange': d, |
|
189 | 189 | 'lastchange_sort': d[1] - d[0], |
|
190 | 190 | 'archives': [], |
|
191 | 191 | 'isdirectory': True, |
|
192 | 192 | 'labels': [], |
|
193 | 193 | } |
|
194 | 194 | |
|
195 | 195 | seendirs.add(name) |
|
196 | 196 | yield row |
|
197 | 197 | continue |
|
198 | 198 | |
|
199 | 199 | u = ui.copy() |
|
200 | 200 | try: |
|
201 | 201 | u.readconfig(os.path.join(path, '.hg', 'hgrc')) |
|
202 | 202 | except Exception as e: |
|
203 | 203 | u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e)) |
|
204 | 204 | continue |
|
205 | 205 | |
|
206 | 206 | def get(section, name, default=uimod._unset): |
|
207 | 207 | return u.config(section, name, default, untrusted=True) |
|
208 | 208 | |
|
209 | 209 | if u.configbool("web", "hidden", untrusted=True): |
|
210 | 210 | continue |
|
211 | 211 | |
|
212 | 212 | if not readallowed(u, req): |
|
213 | 213 | continue |
|
214 | 214 | |
|
215 | 215 | # update time with local timezone |
|
216 | 216 | try: |
|
217 | 217 | r = hg.repository(ui, path) |
|
218 | 218 | except IOError: |
|
219 | 219 | u.warn(_('error accessing repository at %s\n') % path) |
|
220 | 220 | continue |
|
221 | 221 | except error.RepoError: |
|
222 | 222 | u.warn(_('error accessing repository at %s\n') % path) |
|
223 | 223 | continue |
|
224 | 224 | try: |
|
225 | 225 | d = (get_mtime(r.spath), dateutil.makedate()[1]) |
|
226 | 226 | except OSError: |
|
227 | 227 | continue |
|
228 | 228 | |
|
229 | 229 | contact = get_contact(get) |
|
230 | 230 | description = get("web", "description") |
|
231 | 231 | seenrepos.add(name) |
|
232 | 232 | name = get("web", "name", name) |
|
233 | 233 | row = {'contact': contact or "unknown", |
|
234 | 234 | 'contact_sort': contact.upper() or "unknown", |
|
235 | 235 | 'name': name, |
|
236 | 236 | 'name_sort': name, |
|
237 | 237 | 'url': url, |
|
238 | 238 | 'description': description or "unknown", |
|
239 | 239 | 'description_sort': description.upper() or "unknown", |
|
240 | 240 | 'lastchange': d, |
|
241 | 241 | 'lastchange_sort': d[1] - d[0], |
|
242 | 242 | 'archives': archivelist(u, "tip", url), |
|
243 | 243 | 'isdirectory': None, |
|
244 | 244 | 'labels': u.configlist('web', 'labels', untrusted=True), |
|
245 | 245 | } |
|
246 | 246 | |
|
247 | 247 | yield row |
|
248 | 248 | |
|
249 | 249 | def indexentries(ui, repos, req, stripecount, sortcolumn='', |
|
250 | 250 | descending=False, subdir=''): |
|
251 | 251 | |
|
252 | 252 | rows = rawindexentries(ui, repos, req, subdir=subdir) |
|
253 | 253 | |
|
254 | 254 | sortdefault = None, False |
|
255 | 255 | |
|
256 | 256 | if sortcolumn and sortdefault != (sortcolumn, descending): |
|
257 | 257 | sortkey = '%s_sort' % sortcolumn |
|
258 | 258 | rows = sorted(rows, key=lambda x: x[sortkey], |
|
259 | 259 | reverse=descending) |
|
260 | 260 | |
|
261 | 261 | for row, parity in zip(rows, paritygen(stripecount)): |
|
262 | 262 | row['parity'] = parity |
|
263 | 263 | yield row |
|
264 | 264 | |
|
265 | 265 | class hgwebdir(object): |
|
266 | 266 | """HTTP server for multiple repositories. |
|
267 | 267 | |
|
268 | 268 | Given a configuration, different repositories will be served depending |
|
269 | 269 | on the request path. |
|
270 | 270 | |
|
271 | 271 | Instances are typically used as WSGI applications. |
|
272 | 272 | """ |
|
273 | 273 | def __init__(self, conf, baseui=None): |
|
274 | 274 | self.conf = conf |
|
275 | 275 | self.baseui = baseui |
|
276 | 276 | self.ui = None |
|
277 | 277 | self.lastrefresh = 0 |
|
278 | 278 | self.motd = None |
|
279 | 279 | self.refresh() |
|
280 | 280 | |
|
281 | 281 | def refresh(self): |
|
282 | 282 | if self.ui: |
|
283 | 283 | refreshinterval = self.ui.configint('web', 'refreshinterval') |
|
284 | 284 | else: |
|
285 | 285 | item = configitems.coreitems['web']['refreshinterval'] |
|
286 | 286 | refreshinterval = item.default |
|
287 | 287 | |
|
288 | 288 | # refreshinterval <= 0 means to always refresh. |
|
289 | 289 | if (refreshinterval > 0 and |
|
290 | 290 | self.lastrefresh + refreshinterval > time.time()): |
|
291 | 291 | return |
|
292 | 292 | |
|
293 | 293 | if self.baseui: |
|
294 | 294 | u = self.baseui.copy() |
|
295 | 295 | else: |
|
296 | 296 | u = uimod.ui.load() |
|
297 | 297 | u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir') |
|
298 | 298 | u.setconfig('ui', 'nontty', 'true', 'hgwebdir') |
|
299 | 299 | # displaying bundling progress bar while serving feels wrong and may |
|
300 | 300 | # break some wsgi implementations. |
|
301 | 301 | u.setconfig('progress', 'disable', 'true', 'hgweb') |
|
302 | 302 | |
|
303 | 303 | if not isinstance(self.conf, (dict, list, tuple)): |
|
304 | 304 | map = {'paths': 'hgweb-paths'} |
|
305 | 305 | if not os.path.exists(self.conf): |
|
306 | 306 | raise error.Abort(_('config file %s not found!') % self.conf) |
|
307 | 307 | u.readconfig(self.conf, remap=map, trust=True) |
|
308 | 308 | paths = [] |
|
309 | 309 | for name, ignored in u.configitems('hgweb-paths'): |
|
310 | 310 | for path in u.configlist('hgweb-paths', name): |
|
311 | 311 | paths.append((name, path)) |
|
312 | 312 | elif isinstance(self.conf, (list, tuple)): |
|
313 | 313 | paths = self.conf |
|
314 | 314 | elif isinstance(self.conf, dict): |
|
315 | 315 | paths = self.conf.items() |
|
316 | 316 | |
|
317 | 317 | repos = findrepos(paths) |
|
318 | 318 | for prefix, root in u.configitems('collections'): |
|
319 | 319 | prefix = util.pconvert(prefix) |
|
320 | 320 | for path in scmutil.walkrepos(root, followsym=True): |
|
321 | 321 | repo = os.path.normpath(path) |
|
322 | 322 | name = util.pconvert(repo) |
|
323 | 323 | if name.startswith(prefix): |
|
324 | 324 | name = name[len(prefix):] |
|
325 | 325 | repos.append((name.lstrip('/'), repo)) |
|
326 | 326 | |
|
327 | 327 | self.repos = repos |
|
328 | 328 | self.ui = u |
|
329 | 329 | encoding.encoding = self.ui.config('web', 'encoding') |
|
330 | 330 | self.style = self.ui.config('web', 'style') |
|
331 | 331 | self.templatepath = self.ui.config('web', 'templates', untrusted=False) |
|
332 | 332 | self.stripecount = self.ui.config('web', 'stripes') |
|
333 | 333 | if self.stripecount: |
|
334 | 334 | self.stripecount = int(self.stripecount) |
|
335 | 335 | prefix = self.ui.config('web', 'prefix') |
|
336 | 336 | if prefix.startswith('/'): |
|
337 | 337 | prefix = prefix[1:] |
|
338 | 338 | if prefix.endswith('/'): |
|
339 | 339 | prefix = prefix[:-1] |
|
340 | 340 | self.prefix = prefix |
|
341 | 341 | self.lastrefresh = time.time() |
|
342 | 342 | |
|
343 | 343 | def run(self): |
|
344 | 344 | if not encoding.environ.get('GATEWAY_INTERFACE', |
|
345 | 345 | '').startswith("CGI/1."): |
|
346 | 346 | raise RuntimeError("This function is only intended to be " |
|
347 | 347 | "called while running as a CGI script.") |
|
348 | 348 | wsgicgi.launch(self) |
|
349 | 349 | |
|
350 | 350 | def __call__(self, env, respond): |
|
351 | 351 | baseurl = self.ui.config('web', 'baseurl') |
|
352 | 352 | req = requestmod.parserequestfromenv(env, altbaseurl=baseurl) |
|
353 | 353 | res = requestmod.wsgiresponse(req, respond) |
|
354 | 354 | |
|
355 | 355 | return self.run_wsgi(req, res) |
|
356 | 356 | |
|
357 | 357 | def run_wsgi(self, req, res): |
|
358 | 358 | profile = self.ui.configbool('profiling', 'enabled') |
|
359 | 359 | with profiling.profile(self.ui, enabled=profile): |
|
360 | 360 | try: |
|
361 | 361 | for r in self._runwsgi(req, res): |
|
362 | 362 | yield r |
|
363 | 363 | finally: |
|
364 | 364 | # There are known cycles in localrepository that prevent |
|
365 | 365 | # those objects (and tons of held references) from being |
|
366 | 366 | # collected through normal refcounting. We mitigate those |
|
367 | 367 | # leaks by performing an explicit GC on every request. |
|
368 | 368 | # TODO remove this once leaks are fixed. |
|
369 | 369 | # TODO only run this on requests that create localrepository |
|
370 | 370 | # instances instead of every request. |
|
371 | 371 | gc.collect() |
|
372 | 372 | |
|
373 | 373 | def _runwsgi(self, req, res): |
|
374 | 374 | try: |
|
375 | 375 | self.refresh() |
|
376 | 376 | |
|
377 | 377 | csp, nonce = cspvalues(self.ui) |
|
378 | 378 | if csp: |
|
379 | 379 | res.headers['Content-Security-Policy'] = csp |
|
380 | 380 | |
|
381 | 381 | virtual = req.dispatchpath.strip('/') |
|
382 | 382 | tmpl = self.templater(req, nonce) |
|
383 | 383 | ctype = tmpl.render('mimetype', {'encoding': encoding.encoding}) |
|
384 | 384 | |
|
385 | 385 | # Global defaults. These can be overridden by any handler. |
|
386 | 386 | res.status = '200 Script output follows' |
|
387 | 387 | res.headers['Content-Type'] = ctype |
|
388 | 388 | |
|
389 | 389 | # a static file |
|
390 | 390 | if virtual.startswith('static/') or 'static' in req.qsparams: |
|
391 | 391 | if virtual.startswith('static/'): |
|
392 | 392 | fname = virtual[7:] |
|
393 | 393 | else: |
|
394 | 394 | fname = req.qsparams['static'] |
|
395 | 395 | static = self.ui.config("web", "static", None, |
|
396 | 396 | untrusted=False) |
|
397 | 397 | if not static: |
|
398 | 398 | tp = self.templatepath or templater.templatepaths() |
|
399 | 399 | if isinstance(tp, str): |
|
400 | 400 | tp = [tp] |
|
401 | 401 | static = [os.path.join(p, 'static') for p in tp] |
|
402 | 402 | |
|
403 | 403 | staticfile(static, fname, res) |
|
404 | 404 | return res.sendresponse() |
|
405 | 405 | |
|
406 | 406 | # top-level index |
|
407 | 407 | |
|
408 | 408 | repos = dict(self.repos) |
|
409 | 409 | |
|
410 | 410 | if (not virtual or virtual == 'index') and virtual not in repos: |
|
411 | 411 | return self.makeindex(req, res, tmpl) |
|
412 | 412 | |
|
413 | 413 | # nested indexes and hgwebs |
|
414 | 414 | |
|
415 | 415 | if virtual.endswith('/index') and virtual not in repos: |
|
416 | 416 | subdir = virtual[:-len('index')] |
|
417 | 417 | if any(r.startswith(subdir) for r in repos): |
|
418 | 418 | return self.makeindex(req, res, tmpl, subdir) |
|
419 | 419 | |
|
420 | 420 | def _virtualdirs(): |
|
421 | 421 | # Check the full virtual path, each parent, and the root ('') |
|
422 | 422 | if virtual != '': |
|
423 | 423 | yield virtual |
|
424 | 424 | |
|
425 | 425 | for p in util.finddirs(virtual): |
|
426 | 426 | yield p |
|
427 | 427 | |
|
428 | 428 | yield '' |
|
429 | 429 | |
|
430 | 430 | for virtualrepo in _virtualdirs(): |
|
431 | 431 | real = repos.get(virtualrepo) |
|
432 | 432 | if real: |
|
433 | 433 | # Re-parse the WSGI environment to take into account our |
|
434 | 434 | # repository path component. |
|
435 | 435 | req = requestmod.parserequestfromenv( |
|
436 | 436 | req.rawenv, reponame=virtualrepo, |
|
437 | 437 | altbaseurl=self.ui.config('web', 'baseurl')) |
|
438 | 438 | try: |
|
439 | 439 | # ensure caller gets private copy of ui |
|
440 | 440 | repo = hg.repository(self.ui.copy(), real) |
|
441 | 441 | return hgweb_mod.hgweb(repo).run_wsgi(req, res) |
|
442 | 442 | except IOError as inst: |
|
443 | 443 | msg = encoding.strtolocal(inst.strerror) |
|
444 | 444 | raise ErrorResponse(HTTP_SERVER_ERROR, msg) |
|
445 | 445 | except error.RepoError as inst: |
|
446 | 446 | raise ErrorResponse(HTTP_SERVER_ERROR, bytes(inst)) |
|
447 | 447 | |
|
448 | 448 | # browse subdirectories |
|
449 | 449 | subdir = virtual + '/' |
|
450 | 450 | if [r for r in repos if r.startswith(subdir)]: |
|
451 | 451 | return self.makeindex(req, res, tmpl, subdir) |
|
452 | 452 | |
|
453 | 453 | # prefixes not found |
|
454 | 454 | res.status = '404 Not Found' |
|
455 | res.setbodygen(tmpl('notfound', repo=virtual)) | |
|
455 | res.setbodygen(tmpl.generate('notfound', {'repo': virtual})) | |
|
456 | 456 | return res.sendresponse() |
|
457 | 457 | |
|
458 | 458 | except ErrorResponse as e: |
|
459 | 459 | res.status = statusmessage(e.code, pycompat.bytestr(e)) |
|
460 | res.setbodygen(tmpl('error', error=e.message or '')) | |
|
460 | res.setbodygen(tmpl.generate('error', {'error': e.message or ''})) | |
|
461 | 461 | return res.sendresponse() |
|
462 | 462 | finally: |
|
463 | 463 | tmpl = None |
|
464 | 464 | |
|
465 | 465 | def makeindex(self, req, res, tmpl, subdir=""): |
|
466 | 466 | self.refresh() |
|
467 | 467 | sortable = ["name", "description", "contact", "lastchange"] |
|
468 | 468 | sortcolumn, descending = None, False |
|
469 | 469 | if 'sort' in req.qsparams: |
|
470 | 470 | sortcolumn = req.qsparams['sort'] |
|
471 | 471 | descending = sortcolumn.startswith('-') |
|
472 | 472 | if descending: |
|
473 | 473 | sortcolumn = sortcolumn[1:] |
|
474 | 474 | if sortcolumn not in sortable: |
|
475 | 475 | sortcolumn = "" |
|
476 | 476 | |
|
477 | 477 | sort = [("sort_%s" % column, |
|
478 | 478 | "%s%s" % ((not descending and column == sortcolumn) |
|
479 | 479 | and "-" or "", column)) |
|
480 | 480 | for column in sortable] |
|
481 | 481 | |
|
482 | 482 | self.refresh() |
|
483 | 483 | |
|
484 | 484 | entries = indexentries(self.ui, self.repos, req, |
|
485 | 485 | self.stripecount, sortcolumn=sortcolumn, |
|
486 | 486 | descending=descending, subdir=subdir) |
|
487 | 487 | |
|
488 | res.setbodygen(tmpl( | |
|
489 | 'index', | |
|
490 | entries=entries, | |
|
491 | subdir=subdir, | |
|
492 | pathdef=hgweb_mod.makebreadcrumb('/' + subdir, self.prefix), | |
|
493 | sortcolumn=sortcolumn, | |
|
494 | descending=descending, | |
|
495 | **dict(sort))) | |
|
496 | ||
|
488 | mapping = { | |
|
489 | 'entries': entries, | |
|
490 | 'subdir': subdir, | |
|
491 | 'pathdef': hgweb_mod.makebreadcrumb('/' + subdir, self.prefix), | |
|
492 | 'sortcolumn': sortcolumn, | |
|
493 | 'descending': descending, | |
|
494 | } | |
|
495 | mapping.update(sort) | |
|
496 | res.setbodygen(tmpl.generate('index', mapping)) | |
|
497 | 497 | return res.sendresponse() |
|
498 | 498 | |
|
499 | 499 | def templater(self, req, nonce): |
|
500 | 500 | |
|
501 | 501 | def motd(**map): |
|
502 | 502 | if self.motd is not None: |
|
503 | 503 | yield self.motd |
|
504 | 504 | else: |
|
505 | 505 | yield config('web', 'motd') |
|
506 | 506 | |
|
507 | 507 | def config(section, name, default=uimod._unset, untrusted=True): |
|
508 | 508 | return self.ui.config(section, name, default, untrusted) |
|
509 | 509 | |
|
510 | 510 | vars = {} |
|
511 | 511 | styles, (style, mapfile) = hgweb_mod.getstyle(req, config, |
|
512 | 512 | self.templatepath) |
|
513 | 513 | if style == styles[0]: |
|
514 | 514 | vars['style'] = style |
|
515 | 515 | |
|
516 | 516 | sessionvars = webutil.sessionvars(vars, r'?') |
|
517 | 517 | logourl = config('web', 'logourl') |
|
518 | 518 | logoimg = config('web', 'logoimg') |
|
519 | 519 | staticurl = (config('web', 'staticurl') |
|
520 | 520 | or req.apppath + '/static/') |
|
521 | 521 | if not staticurl.endswith('/'): |
|
522 | 522 | staticurl += '/' |
|
523 | 523 | |
|
524 | 524 | defaults = { |
|
525 | 525 | "encoding": encoding.encoding, |
|
526 | 526 | "motd": motd, |
|
527 | 527 | "url": req.apppath + '/', |
|
528 | 528 | "logourl": logourl, |
|
529 | 529 | "logoimg": logoimg, |
|
530 | 530 | "staticurl": staticurl, |
|
531 | 531 | "sessionvars": sessionvars, |
|
532 | 532 | "style": style, |
|
533 | 533 | "nonce": nonce, |
|
534 | 534 | } |
|
535 | 535 | tmpl = templater.templater.frommapfile(mapfile, defaults=defaults) |
|
536 | 536 | return tmpl |
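makeindex() above passes 'pathdef': hgweb_mod.makebreadcrumb('/' + subdir, self.prefix) into the template mapping. As a rough, standalone approximation of what that helper hands to the 'index' template (the real makebreadcrumb also handles the prefix argument and returns a reversed iterator rather than a list), consider:

    import os.path

    def breadcrumb(url):
        # walk the URL from the deepest component up, then reverse the result
        url = url.rstrip('/')
        relpath = url.lstrip('/')
        crumbs = []
        urlel = url
        for pathel in reversed([''] + relpath.split('/')):
            if not pathel or not urlel:
                break
            crumbs.append({'url': urlel, 'name': pathel})
            urlel = os.path.dirname(urlel)
        return list(reversed(crumbs))

    # breadcrumb('/hg/projects/repo') ->
    # [{'url': '/hg', 'name': 'hg'},
    #  {'url': '/hg/projects', 'name': 'projects'},
    #  {'url': '/hg/projects/repo', 'name': 'repo'}]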
@@ -1,1486 +1,1485 b'' | |||
|
1 | 1 | # |
|
2 | 2 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
3 | 3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | import copy |
|
11 | 11 | import mimetypes |
|
12 | 12 | import os |
|
13 | 13 | import re |
|
14 | 14 | |
|
15 | 15 | from ..i18n import _ |
|
16 | 16 | from ..node import hex, nullid, short |
|
17 | 17 | |
|
18 | 18 | from .common import ( |
|
19 | 19 | ErrorResponse, |
|
20 | 20 | HTTP_FORBIDDEN, |
|
21 | 21 | HTTP_NOT_FOUND, |
|
22 | 22 | get_contact, |
|
23 | 23 | paritygen, |
|
24 | 24 | staticfile, |
|
25 | 25 | ) |
|
26 | 26 | |
|
27 | 27 | from .. import ( |
|
28 | 28 | archival, |
|
29 | 29 | dagop, |
|
30 | 30 | encoding, |
|
31 | 31 | error, |
|
32 | 32 | graphmod, |
|
33 | 33 | pycompat, |
|
34 | 34 | revset, |
|
35 | 35 | revsetlang, |
|
36 | 36 | scmutil, |
|
37 | 37 | smartset, |
|
38 | 38 | templater, |
|
39 | 39 | util, |
|
40 | 40 | ) |
|
41 | 41 | |
|
42 | 42 | from . import ( |
|
43 | 43 | webutil, |
|
44 | 44 | ) |
|
45 | 45 | |
|
46 | 46 | __all__ = [] |
|
47 | 47 | commands = {} |
|
48 | 48 | |
|
49 | 49 | class webcommand(object): |
|
50 | 50 | """Decorator used to register a web command handler. |
|
51 | 51 | |
|
52 | 52 | The decorator takes as its positional arguments the name/path the |
|
53 | 53 | command should be accessible under. |
|
54 | 54 | |
|
55 | 55 | When called, functions receive as arguments a ``requestcontext``, |
|
56 | 56 | ``wsgirequest``, and a templater instance for generatoring output. |
|
57 | 57 | The functions should populate the ``rctx.res`` object with details |
|
58 | 58 | about the HTTP response. |
|
59 | 59 | |
|
60 | 60 | The function returns a generator to be consumed by the WSGI application. |
|
61 | 61 | For most commands, this should be the result from |
|
62 | 62 | ``web.res.sendresponse()``. Many commands will call ``web.sendtemplate()`` |
|
63 | 63 | to render a template. |
|
64 | 64 | |
|
65 | 65 | Usage: |
|
66 | 66 | |
|
67 | 67 | @webcommand('mycommand') |
|
68 | 68 | def mycommand(web): |
|
69 | 69 | pass |
|
70 | 70 | """ |
|
71 | 71 | |
|
72 | 72 | def __init__(self, name): |
|
73 | 73 | self.name = name |
|
74 | 74 | |
|
75 | 75 | def __call__(self, func): |
|
76 | 76 | __all__.append(self.name) |
|
77 | 77 | commands[self.name] = func |
|
78 | 78 | return func |
|
79 | 79 | |
|
80 | 80 | @webcommand('log') |
|
81 | 81 | def log(web): |
|
82 | 82 | """ |
|
83 | 83 | /log[/{revision}[/{path}]] |
|
84 | 84 | -------------------------- |
|
85 | 85 | |
|
86 | 86 | Show repository or file history. |
|
87 | 87 | |
|
88 | 88 | For URLs of the form ``/log/{revision}``, a list of changesets starting at |
|
89 | 89 | the specified changeset identifier is shown. If ``{revision}`` is not |
|
90 | 90 | defined, the default is ``tip``. This form is equivalent to the |
|
91 | 91 | ``changelog`` handler. |
|
92 | 92 | |
|
93 | 93 | For URLs of the form ``/log/{revision}/{file}``, the history for a specific |
|
94 | 94 | file will be shown. This form is equivalent to the ``filelog`` handler. |
|
95 | 95 | """ |
|
96 | 96 | |
|
97 | 97 | if web.req.qsparams.get('file'): |
|
98 | 98 | return filelog(web) |
|
99 | 99 | else: |
|
100 | 100 | return changelog(web) |
|
101 | 101 | |
|
102 | 102 | @webcommand('rawfile') |
|
103 | 103 | def rawfile(web): |
|
104 | 104 | guessmime = web.configbool('web', 'guessmime') |
|
105 | 105 | |
|
106 | 106 | path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', '')) |
|
107 | 107 | if not path: |
|
108 | 108 | return manifest(web) |
|
109 | 109 | |
|
110 | 110 | try: |
|
111 | 111 | fctx = webutil.filectx(web.repo, web.req) |
|
112 | 112 | except error.LookupError as inst: |
|
113 | 113 | try: |
|
114 | 114 | return manifest(web) |
|
115 | 115 | except ErrorResponse: |
|
116 | 116 | raise inst |
|
117 | 117 | |
|
118 | 118 | path = fctx.path() |
|
119 | 119 | text = fctx.data() |
|
120 | 120 | mt = 'application/binary' |
|
121 | 121 | if guessmime: |
|
122 | 122 | mt = mimetypes.guess_type(path)[0] |
|
123 | 123 | if mt is None: |
|
124 | 124 | if util.binary(text): |
|
125 | 125 | mt = 'application/binary' |
|
126 | 126 | else: |
|
127 | 127 | mt = 'text/plain' |
|
128 | 128 | if mt.startswith('text/'): |
|
129 | 129 | mt += '; charset="%s"' % encoding.encoding |
|
130 | 130 | |
|
131 | 131 | web.res.headers['Content-Type'] = mt |
|
132 | 132 | filename = (path.rpartition('/')[-1] |
|
133 | 133 | .replace('\\', '\\\\').replace('"', '\\"')) |
|
134 | 134 | web.res.headers['Content-Disposition'] = 'inline; filename="%s"' % filename |
|
135 | 135 | web.res.setbodybytes(text) |
|
136 | 136 | return web.res.sendresponse() |
|
137 | 137 | |
|
138 | 138 | def _filerevision(web, fctx): |
|
139 | 139 | f = fctx.path() |
|
140 | 140 | text = fctx.data() |
|
141 | 141 | parity = paritygen(web.stripecount) |
|
142 | 142 | ishead = fctx.filerev() in fctx.filelog().headrevs() |
|
143 | 143 | |
|
144 | 144 | if util.binary(text): |
|
145 | 145 | mt = mimetypes.guess_type(f)[0] or 'application/octet-stream' |
|
146 | 146 | text = '(binary:%s)' % mt |
|
147 | 147 | |
|
148 | 148 | def lines(): |
|
149 | 149 | for lineno, t in enumerate(text.splitlines(True)): |
|
150 | 150 | yield {"line": t, |
|
151 | 151 | "lineid": "l%d" % (lineno + 1), |
|
152 | 152 | "linenumber": "% 6d" % (lineno + 1), |
|
153 | 153 | "parity": next(parity)} |
|
154 | 154 | |
|
155 | 155 | return web.sendtemplate( |
|
156 | 156 | 'filerevision', |
|
157 | 157 | file=f, |
|
158 | 158 | path=webutil.up(f), |
|
159 | 159 | text=lines(), |
|
160 | 160 | symrev=webutil.symrevorshortnode(web.req, fctx), |
|
161 | 161 | rename=webutil.renamelink(fctx), |
|
162 | 162 | permissions=fctx.manifest().flags(f), |
|
163 | 163 | ishead=int(ishead), |
|
164 | 164 | **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))) |
|
165 | 165 | |
|
166 | 166 | @webcommand('file') |
|
167 | 167 | def file(web): |
|
168 | 168 | """ |
|
169 | 169 | /file/{revision}[/{path}] |
|
170 | 170 | ------------------------- |
|
171 | 171 | |
|
172 | 172 | Show information about a directory or file in the repository. |
|
173 | 173 | |
|
174 | 174 | Info about the ``path`` given as a URL parameter will be rendered. |
|
175 | 175 | |
|
176 | 176 | If ``path`` is a directory, information about the entries in that |
|
177 | 177 | directory will be rendered. This form is equivalent to the ``manifest`` |
|
178 | 178 | handler. |
|
179 | 179 | |
|
180 | 180 | If ``path`` is a file, information about that file will be shown via |
|
181 | 181 | the ``filerevision`` template. |
|
182 | 182 | |
|
183 | 183 | If ``path`` is not defined, information about the root directory will |
|
184 | 184 | be rendered. |
|
185 | 185 | """ |
|
186 | 186 | if web.req.qsparams.get('style') == 'raw': |
|
187 | 187 | return rawfile(web) |
|
188 | 188 | |
|
189 | 189 | path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', '')) |
|
190 | 190 | if not path: |
|
191 | 191 | return manifest(web) |
|
192 | 192 | try: |
|
193 | 193 | return _filerevision(web, webutil.filectx(web.repo, web.req)) |
|
194 | 194 | except error.LookupError as inst: |
|
195 | 195 | try: |
|
196 | 196 | return manifest(web) |
|
197 | 197 | except ErrorResponse: |
|
198 | 198 | raise inst |
|
199 | 199 | |
|
200 | 200 | def _search(web): |
|
201 | 201 | MODE_REVISION = 'rev' |
|
202 | 202 | MODE_KEYWORD = 'keyword' |
|
203 | 203 | MODE_REVSET = 'revset' |
|
204 | 204 | |
|
205 | 205 | def revsearch(ctx): |
|
206 | 206 | yield ctx |
|
207 | 207 | |
|
208 | 208 | def keywordsearch(query): |
|
209 | 209 | lower = encoding.lower |
|
210 | 210 | qw = lower(query).split() |
|
211 | 211 | |
|
212 | 212 | def revgen(): |
|
213 | 213 | cl = web.repo.changelog |
|
214 | 214 | for i in xrange(len(web.repo) - 1, 0, -100): |
|
215 | 215 | l = [] |
|
216 | 216 | for j in cl.revs(max(0, i - 99), i): |
|
217 | 217 | ctx = web.repo[j] |
|
218 | 218 | l.append(ctx) |
|
219 | 219 | l.reverse() |
|
220 | 220 | for e in l: |
|
221 | 221 | yield e |
|
222 | 222 | |
|
223 | 223 | for ctx in revgen(): |
|
224 | 224 | miss = 0 |
|
225 | 225 | for q in qw: |
|
226 | 226 | if not (q in lower(ctx.user()) or |
|
227 | 227 | q in lower(ctx.description()) or |
|
228 | 228 | q in lower(" ".join(ctx.files()))): |
|
229 | 229 | miss = 1 |
|
230 | 230 | break |
|
231 | 231 | if miss: |
|
232 | 232 | continue |
|
233 | 233 | |
|
234 | 234 | yield ctx |
|
235 | 235 | |
|
236 | 236 | def revsetsearch(revs): |
|
237 | 237 | for r in revs: |
|
238 | 238 | yield web.repo[r] |
|
239 | 239 | |
|
240 | 240 | searchfuncs = { |
|
241 | 241 | MODE_REVISION: (revsearch, 'exact revision search'), |
|
242 | 242 | MODE_KEYWORD: (keywordsearch, 'literal keyword search'), |
|
243 | 243 | MODE_REVSET: (revsetsearch, 'revset expression search'), |
|
244 | 244 | } |
|
245 | 245 | |
|
246 | 246 | def getsearchmode(query): |
|
247 | 247 | try: |
|
248 | 248 | ctx = web.repo[query] |
|
249 | 249 | except (error.RepoError, error.LookupError): |
|
250 | 250 | # query is not an exact revision pointer, need to |
|
251 | 251 | # decide if it's a revset expression or keywords |
|
252 | 252 | pass |
|
253 | 253 | else: |
|
254 | 254 | return MODE_REVISION, ctx |
|
255 | 255 | |
|
256 | 256 | revdef = 'reverse(%s)' % query |
|
257 | 257 | try: |
|
258 | 258 | tree = revsetlang.parse(revdef) |
|
259 | 259 | except error.ParseError: |
|
260 | 260 | # can't parse to a revset tree |
|
261 | 261 | return MODE_KEYWORD, query |
|
262 | 262 | |
|
263 | 263 | if revsetlang.depth(tree) <= 2: |
|
264 | 264 | # no revset syntax used |
|
265 | 265 | return MODE_KEYWORD, query |
|
266 | 266 | |
|
267 | 267 | if any((token, (value or '')[:3]) == ('string', 're:') |
|
268 | 268 | for token, value, pos in revsetlang.tokenize(revdef)): |
|
269 | 269 | return MODE_KEYWORD, query |
|
270 | 270 | |
|
271 | 271 | funcsused = revsetlang.funcsused(tree) |
|
272 | 272 | if not funcsused.issubset(revset.safesymbols): |
|
273 | 273 | return MODE_KEYWORD, query |
|
274 | 274 | |
|
275 | 275 | mfunc = revset.match(web.repo.ui, revdef, repo=web.repo) |
|
276 | 276 | try: |
|
277 | 277 | revs = mfunc(web.repo) |
|
278 | 278 | return MODE_REVSET, revs |
|
279 | 279 | # ParseError: wrongly placed tokens, wrongs arguments, etc |
|
280 | 280 | # RepoLookupError: no such revision, e.g. in 'revision:' |
|
281 | 281 | # Abort: bookmark/tag not exists |
|
282 | 282 | # LookupError: ambiguous identifier, e.g. in '(bc)' on a large repo |
|
283 | 283 | except (error.ParseError, error.RepoLookupError, error.Abort, |
|
284 | 284 | LookupError): |
|
285 | 285 | return MODE_KEYWORD, query |
|
286 | 286 | |
|
287 | 287 | def changelist(**map): |
|
288 | 288 | count = 0 |
|
289 | 289 | |
|
290 | 290 | for ctx in searchfunc[0](funcarg): |
|
291 | 291 | count += 1 |
|
292 | 292 | n = ctx.node() |
|
293 | 293 | showtags = webutil.showtag(web.repo, web.tmpl, 'changelogtag', n) |
|
294 | 294 | files = webutil.listfilediffs(web.tmpl, ctx.files(), n, |
|
295 | 295 | web.maxfiles) |
|
296 | 296 | |
|
297 | yield web.tmpl( | |
|
298 | 'searchentry', | |
|
299 | parity=next(parity), | |
|
300 | changelogtag=showtags, | |
|
301 | files=files, | |
|
302 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) | |
|
297 | lm = webutil.commonentry(web.repo, ctx) | |
|
298 | lm.update({ | |
|
299 | 'parity': next(parity), | |
|
300 | 'changelogtag': showtags, | |
|
301 | 'files': files, | |
|
302 | }) | |
|
303 | yield web.tmpl.generate('searchentry', lm) | |
|
303 | 304 | |
|
304 | 305 | if count >= revcount: |
|
305 | 306 | break |
|
306 | 307 | |
|
307 | 308 | query = web.req.qsparams['rev'] |
|
308 | 309 | revcount = web.maxchanges |
|
309 | 310 | if 'revcount' in web.req.qsparams: |
|
310 | 311 | try: |
|
311 | 312 | revcount = int(web.req.qsparams.get('revcount', revcount)) |
|
312 | 313 | revcount = max(revcount, 1) |
|
313 | 314 | web.tmpl.defaults['sessionvars']['revcount'] = revcount |
|
314 | 315 | except ValueError: |
|
315 | 316 | pass |
|
316 | 317 | |
|
317 | 318 | lessvars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
318 | 319 | lessvars['revcount'] = max(revcount // 2, 1) |
|
319 | 320 | lessvars['rev'] = query |
|
320 | 321 | morevars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
321 | 322 | morevars['revcount'] = revcount * 2 |
|
322 | 323 | morevars['rev'] = query |
|
323 | 324 | |
|
324 | 325 | mode, funcarg = getsearchmode(query) |
|
325 | 326 | |
|
326 | 327 | if 'forcekw' in web.req.qsparams: |
|
327 | 328 | showforcekw = '' |
|
328 | 329 | showunforcekw = searchfuncs[mode][1] |
|
329 | 330 | mode = MODE_KEYWORD |
|
330 | 331 | funcarg = query |
|
331 | 332 | else: |
|
332 | 333 | if mode != MODE_KEYWORD: |
|
333 | 334 | showforcekw = searchfuncs[MODE_KEYWORD][1] |
|
334 | 335 | else: |
|
335 | 336 | showforcekw = '' |
|
336 | 337 | showunforcekw = '' |
|
337 | 338 | |
|
338 | 339 | searchfunc = searchfuncs[mode] |
|
339 | 340 | |
|
340 | 341 | tip = web.repo['tip'] |
|
341 | 342 | parity = paritygen(web.stripecount) |
|
342 | 343 | |
|
343 | 344 | return web.sendtemplate( |
|
344 | 345 | 'search', |
|
345 | 346 | query=query, |
|
346 | 347 | node=tip.hex(), |
|
347 | 348 | symrev='tip', |
|
348 | 349 | entries=changelist, |
|
349 | 350 | archives=web.archivelist('tip'), |
|
350 | 351 | morevars=morevars, |
|
351 | 352 | lessvars=lessvars, |
|
352 | 353 | modedesc=searchfunc[1], |
|
353 | 354 | showforcekw=showforcekw, |
|
354 | 355 | showunforcekw=showunforcekw) |
|
355 | 356 | |
|
356 | 357 | @webcommand('changelog') |
|
357 | 358 | def changelog(web, shortlog=False): |
|
358 | 359 | """ |
|
359 | 360 | /changelog[/{revision}] |
|
360 | 361 | ----------------------- |
|
361 | 362 | |
|
362 | 363 | Show information about multiple changesets. |
|
363 | 364 | |
|
364 | 365 | If the optional ``revision`` URL argument is absent, information about |
|
365 | 366 | all changesets starting at ``tip`` will be rendered. If the ``revision`` |
|
366 | 367 | argument is present, changesets will be shown starting from the specified |
|
367 | 368 | revision. |
|
368 | 369 | |
|
369 | 370 | If ``revision`` is absent, the ``rev`` query string argument may be |
|
370 | 371 | defined. This will perform a search for changesets. |
|
371 | 372 | |
|
372 | 373 | The argument for ``rev`` can be a single revision, a revision set, |
|
373 | 374 | or a literal keyword to search for in changeset data (equivalent to |
|
374 | 375 | :hg:`log -k`). |
|
375 | 376 | |
|
376 | 377 | The ``revcount`` query string argument defines the maximum number of
|
377 | 378 | changesets to render. |
|
378 | 379 | |
|
379 | 380 | For non-searches, the ``changelog`` template will be rendered. |
|
380 | 381 | """ |
|
381 | 382 | |
|
382 | 383 | query = '' |
|
383 | 384 | if 'node' in web.req.qsparams: |
|
384 | 385 | ctx = webutil.changectx(web.repo, web.req) |
|
385 | 386 | symrev = webutil.symrevorshortnode(web.req, ctx) |
|
386 | 387 | elif 'rev' in web.req.qsparams: |
|
387 | 388 | return _search(web) |
|
388 | 389 | else: |
|
389 | 390 | ctx = web.repo['tip'] |
|
390 | 391 | symrev = 'tip' |
|
391 | 392 | |
|
392 | 393 | def changelist(): |
|
393 | 394 | revs = [] |
|
394 | 395 | if pos != -1: |
|
395 | 396 | revs = web.repo.changelog.revs(pos, 0) |
|
396 | 397 | curcount = 0 |
|
397 | 398 | for rev in revs: |
|
398 | 399 | curcount += 1 |
|
399 | 400 | if curcount > revcount + 1: |
|
400 | 401 | break |
|
401 | 402 | |
|
402 | 403 | entry = webutil.changelistentry(web, web.repo[rev]) |
|
403 | 404 | entry['parity'] = next(parity) |
|
404 | 405 | yield entry |
|
405 | 406 | |
|
406 | 407 | if shortlog: |
|
407 | 408 | revcount = web.maxshortchanges |
|
408 | 409 | else: |
|
409 | 410 | revcount = web.maxchanges |
|
410 | 411 | |
|
411 | 412 | if 'revcount' in web.req.qsparams: |
|
412 | 413 | try: |
|
413 | 414 | revcount = int(web.req.qsparams.get('revcount', revcount)) |
|
414 | 415 | revcount = max(revcount, 1) |
|
415 | 416 | web.tmpl.defaults['sessionvars']['revcount'] = revcount |
|
416 | 417 | except ValueError: |
|
417 | 418 | pass |
|
418 | 419 | |
|
419 | 420 | lessvars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
420 | 421 | lessvars['revcount'] = max(revcount // 2, 1) |
|
421 | 422 | morevars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
422 | 423 | morevars['revcount'] = revcount * 2 |
|
423 | 424 | |
|
424 | 425 | count = len(web.repo) |
|
425 | 426 | pos = ctx.rev() |
|
426 | 427 | parity = paritygen(web.stripecount) |
|
427 | 428 | |
|
428 | 429 | changenav = webutil.revnav(web.repo).gen(pos, revcount, count) |
|
429 | 430 | |
|
430 | 431 | entries = list(changelist()) |
|
431 | 432 | latestentry = entries[:1] |
|
432 | 433 | if len(entries) > revcount: |
|
433 | 434 | nextentry = entries[-1:] |
|
434 | 435 | entries = entries[:-1] |
|
435 | 436 | else: |
|
436 | 437 | nextentry = [] |
|
437 | 438 | |
|
438 | 439 | return web.sendtemplate( |
|
439 | 440 | 'shortlog' if shortlog else 'changelog', |
|
440 | 441 | changenav=changenav, |
|
441 | 442 | node=ctx.hex(), |
|
442 | 443 | rev=pos, |
|
443 | 444 | symrev=symrev, |
|
444 | 445 | changesets=count, |
|
445 | 446 | entries=entries, |
|
446 | 447 | latestentry=latestentry, |
|
447 | 448 | nextentry=nextentry, |
|
448 | 449 | archives=web.archivelist('tip'), |
|
449 | 450 | revcount=revcount, |
|
450 | 451 | morevars=morevars, |
|
451 | 452 | lessvars=lessvars, |
|
452 | 453 | query=query) |
|
453 | 454 | |
|
454 | 455 | @webcommand('shortlog') |
|
455 | 456 | def shortlog(web): |
|
456 | 457 | """ |
|
457 | 458 | /shortlog |
|
458 | 459 | --------- |
|
459 | 460 | |
|
460 | 461 | Show basic information about a set of changesets. |
|
461 | 462 | |
|
462 | 463 | This accepts the same parameters as the ``changelog`` handler. The only |
|
463 | 464 | difference is the ``shortlog`` template will be rendered instead of the |
|
464 | 465 | ``changelog`` template. |
|
465 | 466 | """ |
|
466 | 467 | return changelog(web, shortlog=True) |
|
467 | 468 | |
|
468 | 469 | @webcommand('changeset') |
|
469 | 470 | def changeset(web): |
|
470 | 471 | """ |
|
471 | 472 | /changeset[/{revision}] |
|
472 | 473 | ----------------------- |
|
473 | 474 | |
|
474 | 475 | Show information about a single changeset. |
|
475 | 476 | |
|
476 | 477 | A URL path argument is the changeset identifier to show. See ``hg help |
|
477 | 478 | revisions`` for possible values. If not defined, the ``tip`` changeset |
|
478 | 479 | will be shown. |
|
479 | 480 | |
|
480 | 481 | The ``changeset`` template is rendered. Contents of the ``changesettag``, |
|
481 | 482 | ``changesetbookmark``, ``filenodelink``, ``filenolink``, and the many |
|
482 | 483 | templates related to diffs may all be used to produce the output. |
|
483 | 484 | """ |
|
484 | 485 | ctx = webutil.changectx(web.repo, web.req) |
|
485 | 486 | |
|
486 | 487 | return web.sendtemplate( |
|
487 | 488 | 'changeset', |
|
488 | 489 | **webutil.changesetentry(web, ctx)) |
|
489 | 490 | |
|
490 | 491 | rev = webcommand('rev')(changeset) |
|
491 | 492 | |
|
492 | 493 | def decodepath(path): |
|
493 | 494 | """Hook for mapping a path in the repository to a path in the |
|
494 | 495 | working copy. |
|
495 | 496 | |
|
496 | 497 | Extensions (e.g., largefiles) can override this to remap files in |
|
497 | 498 | the virtual file system presented by the manifest command below.""" |
|
498 | 499 | return path |
|
499 | 500 | |
|
500 | 501 | @webcommand('manifest') |
|
501 | 502 | def manifest(web): |
|
502 | 503 | """ |
|
503 | 504 | /manifest[/{revision}[/{path}]] |
|
504 | 505 | ------------------------------- |
|
505 | 506 | |
|
506 | 507 | Show information about a directory. |
|
507 | 508 | |
|
508 | 509 | If the URL path arguments are omitted, information about the root |
|
509 | 510 | directory for the ``tip`` changeset will be shown. |
|
510 | 511 | |
|
511 | 512 | Because this handler can only show information for directories, it |
|
512 | 513 | is recommended to use the ``file`` handler instead, as it can handle both |
|
513 | 514 | directories and files. |
|
514 | 515 | |
|
515 | 516 | The ``manifest`` template will be rendered for this handler. |
|
516 | 517 | """ |
|
517 | 518 | if 'node' in web.req.qsparams: |
|
518 | 519 | ctx = webutil.changectx(web.repo, web.req) |
|
519 | 520 | symrev = webutil.symrevorshortnode(web.req, ctx) |
|
520 | 521 | else: |
|
521 | 522 | ctx = web.repo['tip'] |
|
522 | 523 | symrev = 'tip' |
|
523 | 524 | path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', '')) |
|
524 | 525 | mf = ctx.manifest() |
|
525 | 526 | node = ctx.node() |
|
526 | 527 | |
|
527 | 528 | files = {} |
|
528 | 529 | dirs = {} |
|
529 | 530 | parity = paritygen(web.stripecount) |
|
530 | 531 | |
|
531 | 532 | if path and path[-1:] != "/": |
|
532 | 533 | path += "/" |
|
533 | 534 | l = len(path) |
|
534 | 535 | abspath = "/" + path |
|
535 | 536 | |
|
536 | 537 | for full, n in mf.iteritems(): |
|
537 | 538 | # the virtual path (working copy path) used for the full |
|
538 | 539 | # (repository) path |
|
539 | 540 | f = decodepath(full) |
|
540 | 541 | |
|
541 | 542 | if f[:l] != path: |
|
542 | 543 | continue |
|
543 | 544 | remain = f[l:] |
|
544 | 545 | elements = remain.split('/') |
|
545 | 546 | if len(elements) == 1: |
|
546 | 547 | files[remain] = full |
|
547 | 548 | else: |
|
548 | 549 | h = dirs # need to retain ref to dirs (root) |
|
549 | 550 | for elem in elements[0:-1]: |
|
550 | 551 | if elem not in h: |
|
551 | 552 | h[elem] = {} |
|
552 | 553 | h = h[elem] |
|
553 | 554 | if len(h) > 1: |
|
554 | 555 | break |
|
555 | 556 | h[None] = None # denotes files present |
|
556 | 557 | |
|
557 | 558 | if mf and not files and not dirs: |
|
558 | 559 | raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path) |
|
559 | 560 | |
|
560 | 561 | def filelist(**map): |
|
561 | 562 | for f in sorted(files): |
|
562 | 563 | full = files[f] |
|
563 | 564 | |
|
564 | 565 | fctx = ctx.filectx(full) |
|
565 | 566 | yield {"file": full, |
|
566 | 567 | "parity": next(parity), |
|
567 | 568 | "basename": f, |
|
568 | 569 | "date": fctx.date(), |
|
569 | 570 | "size": fctx.size(), |
|
570 | 571 | "permissions": mf.flags(full)} |
|
571 | 572 | |
|
572 | 573 | def dirlist(**map): |
|
573 | 574 | for d in sorted(dirs): |
|
574 | 575 | |
|
575 | 576 | emptydirs = [] |
|
576 | 577 | h = dirs[d] |
|
577 | 578 | while isinstance(h, dict) and len(h) == 1: |
|
578 | 579 | k, v = next(iter(h.items())) |
|
579 | 580 | if v: |
|
580 | 581 | emptydirs.append(k) |
|
581 | 582 | h = v |
|
582 | 583 | |
|
583 | 584 | path = "%s%s" % (abspath, d) |
|
584 | 585 | yield {"parity": next(parity), |
|
585 | 586 | "path": path, |
|
586 | 587 | "emptydirs": "/".join(emptydirs), |
|
587 | 588 | "basename": d} |
|
588 | 589 | |
|
589 | 590 | return web.sendtemplate( |
|
590 | 591 | 'manifest', |
|
591 | 592 | symrev=symrev, |
|
592 | 593 | path=abspath, |
|
593 | 594 | up=webutil.up(abspath), |
|
594 | 595 | upparity=next(parity), |
|
595 | 596 | fentries=filelist, |
|
596 | 597 | dentries=dirlist, |
|
597 | 598 | archives=web.archivelist(hex(node)), |
|
598 | 599 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) |
|
599 | 600 | |
|
600 | 601 | @webcommand('tags') |
|
601 | 602 | def tags(web): |
|
602 | 603 | """ |
|
603 | 604 | /tags |
|
604 | 605 | ----- |
|
605 | 606 | |
|
606 | 607 | Show information about tags. |
|
607 | 608 | |
|
608 | 609 | No arguments are accepted. |
|
609 | 610 | |
|
610 | 611 | The ``tags`` template is rendered. |
|
611 | 612 | """ |
|
612 | 613 | i = list(reversed(web.repo.tagslist())) |
|
613 | 614 | parity = paritygen(web.stripecount) |
|
614 | 615 | |
|
615 | 616 | def entries(notip, latestonly, **map): |
|
616 | 617 | t = i |
|
617 | 618 | if notip: |
|
618 | 619 | t = [(k, n) for k, n in i if k != "tip"] |
|
619 | 620 | if latestonly: |
|
620 | 621 | t = t[:1] |
|
621 | 622 | for k, n in t: |
|
622 | 623 | yield {"parity": next(parity), |
|
623 | 624 | "tag": k, |
|
624 | 625 | "date": web.repo[n].date(), |
|
625 | 626 | "node": hex(n)} |
|
626 | 627 | |
|
627 | 628 | return web.sendtemplate( |
|
628 | 629 | 'tags', |
|
629 | 630 | node=hex(web.repo.changelog.tip()), |
|
630 | 631 | entries=lambda **x: entries(False, False, **x), |
|
631 | 632 | entriesnotip=lambda **x: entries(True, False, **x), |
|
632 | 633 | latestentry=lambda **x: entries(True, True, **x)) |
|
633 | 634 | |
|
634 | 635 | @webcommand('bookmarks') |
|
635 | 636 | def bookmarks(web): |
|
636 | 637 | """ |
|
637 | 638 | /bookmarks |
|
638 | 639 | ---------- |
|
639 | 640 | |
|
640 | 641 | Show information about bookmarks. |
|
641 | 642 | |
|
642 | 643 | No arguments are accepted. |
|
643 | 644 | |
|
644 | 645 | The ``bookmarks`` template is rendered. |
|
645 | 646 | """ |
|
646 | 647 | i = [b for b in web.repo._bookmarks.items() if b[1] in web.repo] |
|
647 | 648 | sortkey = lambda b: (web.repo[b[1]].rev(), b[0]) |
|
648 | 649 | i = sorted(i, key=sortkey, reverse=True) |
|
649 | 650 | parity = paritygen(web.stripecount) |
|
650 | 651 | |
|
651 | 652 | def entries(latestonly, **map): |
|
652 | 653 | t = i |
|
653 | 654 | if latestonly: |
|
654 | 655 | t = i[:1] |
|
655 | 656 | for k, n in t: |
|
656 | 657 | yield {"parity": next(parity), |
|
657 | 658 | "bookmark": k, |
|
658 | 659 | "date": web.repo[n].date(), |
|
659 | 660 | "node": hex(n)} |
|
660 | 661 | |
|
661 | 662 | if i: |
|
662 | 663 | latestrev = i[0][1] |
|
663 | 664 | else: |
|
664 | 665 | latestrev = -1 |
|
665 | 666 | |
|
666 | 667 | return web.sendtemplate( |
|
667 | 668 | 'bookmarks', |
|
668 | 669 | node=hex(web.repo.changelog.tip()), |
|
669 | 670 | lastchange=[{'date': web.repo[latestrev].date()}], |
|
670 | 671 | entries=lambda **x: entries(latestonly=False, **x), |
|
671 | 672 | latestentry=lambda **x: entries(latestonly=True, **x)) |
|
672 | 673 | |
|
673 | 674 | @webcommand('branches') |
|
674 | 675 | def branches(web): |
|
675 | 676 | """ |
|
676 | 677 | /branches |
|
677 | 678 | --------- |
|
678 | 679 | |
|
679 | 680 | Show information about branches. |
|
680 | 681 | |
|
681 | 682 | All known branches are contained in the output, even closed branches. |
|
682 | 683 | |
|
683 | 684 | No arguments are accepted. |
|
684 | 685 | |
|
685 | 686 | The ``branches`` template is rendered. |
|
686 | 687 | """ |
|
687 | 688 | entries = webutil.branchentries(web.repo, web.stripecount) |
|
688 | 689 | latestentry = webutil.branchentries(web.repo, web.stripecount, 1) |
|
689 | 690 | |
|
690 | 691 | return web.sendtemplate( |
|
691 | 692 | 'branches', |
|
692 | 693 | node=hex(web.repo.changelog.tip()), |
|
693 | 694 | entries=entries, |
|
694 | 695 | latestentry=latestentry) |
|
695 | 696 | |
|
696 | 697 | @webcommand('summary') |
|
697 | 698 | def summary(web): |
|
698 | 699 | """ |
|
699 | 700 | /summary |
|
700 | 701 | -------- |
|
701 | 702 | |
|
702 | 703 | Show a summary of repository state. |
|
703 | 704 | |
|
704 | 705 | Information about the latest changesets, bookmarks, tags, and branches |
|
705 | 706 | is captured by this handler. |
|
706 | 707 | |
|
707 | 708 | The ``summary`` template is rendered. |
|
708 | 709 | """ |
|
709 | 710 | i = reversed(web.repo.tagslist()) |
|
710 | 711 | |
|
711 | 712 | def tagentries(**map): |
|
712 | 713 | parity = paritygen(web.stripecount) |
|
713 | 714 | count = 0 |
|
714 | 715 | for k, n in i: |
|
715 | 716 | if k == "tip": # skip tip |
|
716 | 717 | continue |
|
717 | 718 | |
|
718 | 719 | count += 1 |
|
719 | 720 | if count > 10: # limit to 10 tags |
|
720 | 721 | break |
|
721 | 722 | |
|
722 | yield web.tmpl( | |
|
723 | 'tagentry', | |

724 | parity=next(parity), | |

725 | tag=k, | |

726 | node=hex(n), | |
|
727 | date=web.repo[n].date()) | |
|
723 | yield web.tmpl.generate('tagentry', { | |
|
724 | 'parity': next(parity), | |
|
725 | 'tag': k, | |
|
726 | 'node': hex(n), | |
|
727 | 'date': web.repo[n].date(), | |
|
728 | }) | |
|
728 | 729 | |
|
729 | 730 | def bookmarks(**map): |
|
730 | 731 | parity = paritygen(web.stripecount) |
|
731 | 732 | marks = [b for b in web.repo._bookmarks.items() if b[1] in web.repo] |
|
732 | 733 | sortkey = lambda b: (web.repo[b[1]].rev(), b[0]) |
|
733 | 734 | marks = sorted(marks, key=sortkey, reverse=True) |
|
734 | 735 | for k, n in marks[:10]: # limit to 10 bookmarks |
|
735 | 736 | yield {'parity': next(parity), |
|
736 | 737 | 'bookmark': k, |
|
737 | 738 | 'date': web.repo[n].date(), |
|
738 | 739 | 'node': hex(n)} |
|
739 | 740 | |
|
740 | 741 | def changelist(**map): |
|
741 | 742 | parity = paritygen(web.stripecount, offset=start - end) |
|
742 | 743 | l = [] # build a list in forward order for efficiency |
|
743 | 744 | revs = [] |
|
744 | 745 | if start < end: |
|
745 | 746 | revs = web.repo.changelog.revs(start, end - 1) |
|
746 | 747 | for i in revs: |
|
747 | 748 | ctx = web.repo[i] |
|
748 | ||
|
749 | l.append(web.tmpl( | |
|
750 | 'shortlogentry', | |
|
751 | parity=next(parity), | |
|
752 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx)))) | |
|
749 | lm = webutil.commonentry(web.repo, ctx) | |
|
750 | lm['parity'] = next(parity) | |
|
751 | l.append(web.tmpl.generate('shortlogentry', lm)) | |
|
753 | 752 | |
|
754 | 753 | for entry in reversed(l): |
|
755 | 754 | yield entry |
|
756 | 755 | |
|
757 | 756 | tip = web.repo['tip'] |
|
758 | 757 | count = len(web.repo) |
|
759 | 758 | start = max(0, count - web.maxchanges) |
|
760 | 759 | end = min(count, start + web.maxchanges) |
|
761 | 760 | |
|
762 | 761 | desc = web.config("web", "description") |
|
763 | 762 | if not desc: |
|
764 | 763 | desc = 'unknown' |
|
765 | 764 | |
|
766 | 765 | return web.sendtemplate( |
|
767 | 766 | 'summary', |
|
768 | 767 | desc=desc, |
|
769 | 768 | owner=get_contact(web.config) or 'unknown', |
|
770 | 769 | lastchange=tip.date(), |
|
771 | 770 | tags=tagentries, |
|
772 | 771 | bookmarks=bookmarks, |
|
773 | 772 | branches=webutil.branchentries(web.repo, web.stripecount, 10), |
|
774 | 773 | shortlog=changelist, |
|
775 | 774 | node=tip.hex(), |
|
776 | 775 | symrev='tip', |
|
777 | 776 | archives=web.archivelist('tip'), |
|
778 | 777 | labels=web.configlist('web', 'labels')) |
|
779 | 778 | |
|
780 | 779 | @webcommand('filediff') |
|
781 | 780 | def filediff(web): |
|
782 | 781 | """ |
|
783 | 782 | /diff/{revision}/{path} |
|
784 | 783 | ----------------------- |
|
785 | 784 | |
|
786 | 785 | Show how a file changed in a particular commit. |
|
787 | 786 | |
|
788 | 787 | The ``filediff`` template is rendered. |
|
789 | 788 | |
|
790 | 789 | This handler is registered under both the ``/diff`` and ``/filediff`` |
|
791 | 790 | paths. ``/diff`` is used in modern code. |
|
792 | 791 | """ |
|
793 | 792 | fctx, ctx = None, None |
|
794 | 793 | try: |
|
795 | 794 | fctx = webutil.filectx(web.repo, web.req) |
|
796 | 795 | except LookupError: |
|
797 | 796 | ctx = webutil.changectx(web.repo, web.req) |
|
798 | 797 | path = webutil.cleanpath(web.repo, web.req.qsparams['file']) |
|
799 | 798 | if path not in ctx.files(): |
|
800 | 799 | raise |
|
801 | 800 | |
|
802 | 801 | if fctx is not None: |
|
803 | 802 | path = fctx.path() |
|
804 | 803 | ctx = fctx.changectx() |
|
805 | 804 | basectx = ctx.p1() |
|
806 | 805 | |
|
807 | 806 | style = web.config('web', 'style') |
|
808 | 807 | if 'style' in web.req.qsparams: |
|
809 | 808 | style = web.req.qsparams['style'] |
|
810 | 809 | |
|
811 | 810 | diffs = webutil.diffs(web, ctx, basectx, [path], style) |
|
812 | 811 | if fctx is not None: |
|
813 | 812 | rename = webutil.renamelink(fctx) |
|
814 | 813 | ctx = fctx |
|
815 | 814 | else: |
|
816 | 815 | rename = [] |
|
817 | 816 | ctx = ctx |
|
818 | 817 | |
|
819 | 818 | return web.sendtemplate( |
|
820 | 819 | 'filediff', |
|
821 | 820 | file=path, |
|
822 | 821 | symrev=webutil.symrevorshortnode(web.req, ctx), |
|
823 | 822 | rename=rename, |
|
824 | 823 | diff=diffs, |
|
825 | 824 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) |
|
826 | 825 | |
|
827 | 826 | diff = webcommand('diff')(filediff) |
|
828 | 827 | |
|
829 | 828 | @webcommand('comparison') |
|
830 | 829 | def comparison(web): |
|
831 | 830 | """ |
|
832 | 831 | /comparison/{revision}/{path} |
|
833 | 832 | ----------------------------- |
|
834 | 833 | |
|
835 | 834 | Show a comparison between the old and new versions of a file from changes |
|
836 | 835 | made on a particular revision. |
|
837 | 836 | |
|
838 | 837 | This is similar to the ``diff`` handler. However, this form features |
|
839 | 838 | a split or side-by-side diff rather than a unified diff. |
|
840 | 839 | |
|
841 | 840 | The ``context`` query string argument can be used to control the lines of |
|
842 | 841 | context in the diff. |
|
843 | 842 | |
|
844 | 843 | The ``filecomparison`` template is rendered. |
|
845 | 844 | """ |
|
846 | 845 | ctx = webutil.changectx(web.repo, web.req) |
|
847 | 846 | if 'file' not in web.req.qsparams: |
|
848 | 847 | raise ErrorResponse(HTTP_NOT_FOUND, 'file not given') |
|
849 | 848 | path = webutil.cleanpath(web.repo, web.req.qsparams['file']) |
|
850 | 849 | |
|
851 | 850 | parsecontext = lambda v: v == 'full' and -1 or int(v) |
|
852 | 851 | if 'context' in web.req.qsparams: |
|
853 | 852 | context = parsecontext(web.req.qsparams['context']) |
|
854 | 853 | else: |
|
855 | 854 | context = parsecontext(web.config('web', 'comparisoncontext', '5')) |
|
856 | 855 | |
|
857 | 856 | def filelines(f): |
|
858 | 857 | if f.isbinary(): |
|
859 | 858 | mt = mimetypes.guess_type(f.path())[0] |
|
860 | 859 | if not mt: |
|
861 | 860 | mt = 'application/octet-stream' |
|
862 | 861 | return [_('(binary file %s, hash: %s)') % (mt, hex(f.filenode()))] |
|
863 | 862 | return f.data().splitlines() |
|
864 | 863 | |
|
865 | 864 | fctx = None |
|
866 | 865 | parent = ctx.p1() |
|
867 | 866 | leftrev = parent.rev() |
|
868 | 867 | leftnode = parent.node() |
|
869 | 868 | rightrev = ctx.rev() |
|
870 | 869 | rightnode = ctx.node() |
|
871 | 870 | if path in ctx: |
|
872 | 871 | fctx = ctx[path] |
|
873 | 872 | rightlines = filelines(fctx) |
|
874 | 873 | if path not in parent: |
|
875 | 874 | leftlines = () |
|
876 | 875 | else: |
|
877 | 876 | pfctx = parent[path] |
|
878 | 877 | leftlines = filelines(pfctx) |
|
879 | 878 | else: |
|
880 | 879 | rightlines = () |
|
881 | 880 | pfctx = ctx.parents()[0][path] |
|
882 | 881 | leftlines = filelines(pfctx) |
|
883 | 882 | |
|
884 | 883 | comparison = webutil.compare(web.tmpl, context, leftlines, rightlines) |
|
885 | 884 | if fctx is not None: |
|
886 | 885 | rename = webutil.renamelink(fctx) |
|
887 | 886 | ctx = fctx |
|
888 | 887 | else: |
|
889 | 888 | rename = [] |
|
890 | 889 | ctx = ctx |
|
891 | 890 | |
|
892 | 891 | return web.sendtemplate( |
|
893 | 892 | 'filecomparison', |
|
894 | 893 | file=path, |
|
895 | 894 | symrev=webutil.symrevorshortnode(web.req, ctx), |
|
896 | 895 | rename=rename, |
|
897 | 896 | leftrev=leftrev, |
|
898 | 897 | leftnode=hex(leftnode), |
|
899 | 898 | rightrev=rightrev, |
|
900 | 899 | rightnode=hex(rightnode), |
|
901 | 900 | comparison=comparison, |
|
902 | 901 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) |
|
903 | 902 | |
|
904 | 903 | @webcommand('annotate') |
|
905 | 904 | def annotate(web): |
|
906 | 905 | """ |
|
907 | 906 | /annotate/{revision}/{path} |
|
908 | 907 | --------------------------- |
|
909 | 908 | |
|
910 | 909 | Show changeset information for each line in a file. |
|
911 | 910 | |
|
912 | 911 | The ``ignorews``, ``ignorewsamount``, ``ignorewseol``, and |
|
913 | 912 | ``ignoreblanklines`` query string arguments have the same meaning as |
|
914 | 913 | their ``[annotate]`` config equivalents. It uses the hgrc boolean |
|
915 | 914 | parsing logic to interpret the value. e.g. ``0`` and ``false`` are |
|
916 | 915 | false and ``1`` and ``true`` are true. If not defined, the server |
|
917 | 916 | default settings are used. |
|
918 | 917 | |
|
919 | 918 | The ``fileannotate`` template is rendered. |
|
920 | 919 | """ |
|
921 | 920 | fctx = webutil.filectx(web.repo, web.req) |
|
922 | 921 | f = fctx.path() |
|
923 | 922 | parity = paritygen(web.stripecount) |
|
924 | 923 | ishead = fctx.filerev() in fctx.filelog().headrevs() |
|
925 | 924 | |
|
926 | 925 | # parents() is called once per line and several lines likely belong to |
|
927 | 926 | # same revision. So it is worth caching. |
|
928 | 927 | # TODO there are still redundant operations within basefilectx.parents() |
|
929 | 928 | # and from the fctx.annotate() call itself that could be cached. |
|
930 | 929 | parentscache = {} |
|
931 | 930 | def parents(f): |
|
932 | 931 | rev = f.rev() |
|
933 | 932 | if rev not in parentscache: |
|
934 | 933 | parentscache[rev] = [] |
|
935 | 934 | for p in f.parents(): |
|
936 | 935 | entry = { |
|
937 | 936 | 'node': p.hex(), |
|
938 | 937 | 'rev': p.rev(), |
|
939 | 938 | } |
|
940 | 939 | parentscache[rev].append(entry) |
|
941 | 940 | |
|
942 | 941 | for p in parentscache[rev]: |
|
943 | 942 | yield p |
|
944 | 943 | |
|
945 | 944 | def annotate(**map): |
|
946 | 945 | if fctx.isbinary(): |
|
947 | 946 | mt = (mimetypes.guess_type(fctx.path())[0] |
|
948 | 947 | or 'application/octet-stream') |
|
949 | 948 | lines = [((fctx.filectx(fctx.filerev()), 1), '(binary:%s)' % mt)] |
|
950 | 949 | else: |
|
951 | 950 | lines = webutil.annotate(web.req, fctx, web.repo.ui) |
|
952 | 951 | |
|
953 | 952 | previousrev = None |
|
954 | 953 | blockparitygen = paritygen(1) |
|
955 | 954 | for lineno, (aline, l) in enumerate(lines): |
|
956 | 955 | f = aline.fctx |
|
957 | 956 | rev = f.rev() |
|
958 | 957 | if rev != previousrev: |
|
959 | 958 | blockhead = True |
|
960 | 959 | blockparity = next(blockparitygen) |
|
961 | 960 | else: |
|
962 | 961 | blockhead = None |
|
963 | 962 | previousrev = rev |
|
964 | 963 | yield {"parity": next(parity), |
|
965 | 964 | "node": f.hex(), |
|
966 | 965 | "rev": rev, |
|
967 | 966 | "author": f.user(), |
|
968 | 967 | "parents": parents(f), |
|
969 | 968 | "desc": f.description(), |
|
970 | 969 | "extra": f.extra(), |
|
971 | 970 | "file": f.path(), |
|
972 | 971 | "blockhead": blockhead, |
|
973 | 972 | "blockparity": blockparity, |
|
974 | 973 | "targetline": aline.lineno, |
|
975 | 974 | "line": l, |
|
976 | 975 | "lineno": lineno + 1, |
|
977 | 976 | "lineid": "l%d" % (lineno + 1), |
|
978 | 977 | "linenumber": "% 6d" % (lineno + 1), |
|
979 | 978 | "revdate": f.date()} |
|
980 | 979 | |
|
981 | 980 | diffopts = webutil.difffeatureopts(web.req, web.repo.ui, 'annotate') |
|
982 | 981 | diffopts = {k: getattr(diffopts, k) for k in diffopts.defaults} |
|
983 | 982 | |
|
984 | 983 | return web.sendtemplate( |
|
985 | 984 | 'fileannotate', |
|
986 | 985 | file=f, |
|
987 | 986 | annotate=annotate, |
|
988 | 987 | path=webutil.up(f), |
|
989 | 988 | symrev=webutil.symrevorshortnode(web.req, fctx), |
|
990 | 989 | rename=webutil.renamelink(fctx), |
|
991 | 990 | permissions=fctx.manifest().flags(f), |
|
992 | 991 | ishead=int(ishead), |
|
993 | 992 | diffopts=diffopts, |
|
994 | 993 | **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))) |
|
995 | 994 | |
|
996 | 995 | @webcommand('filelog') |
|
997 | 996 | def filelog(web): |
|
998 | 997 | """ |
|
999 | 998 | /filelog/{revision}/{path} |
|
1000 | 999 | -------------------------- |
|
1001 | 1000 | |
|
1002 | 1001 | Show information about the history of a file in the repository. |
|
1003 | 1002 | |
|
1004 | 1003 | The ``revcount`` query string argument can be defined to control the |
|
1005 | 1004 | maximum number of entries to show. |
|
1006 | 1005 | |
|
1007 | 1006 | The ``filelog`` template will be rendered. |
|
1008 | 1007 | """ |
|
1009 | 1008 | |
|
1010 | 1009 | try: |
|
1011 | 1010 | fctx = webutil.filectx(web.repo, web.req) |
|
1012 | 1011 | f = fctx.path() |
|
1013 | 1012 | fl = fctx.filelog() |
|
1014 | 1013 | except error.LookupError: |
|
1015 | 1014 | f = webutil.cleanpath(web.repo, web.req.qsparams['file']) |
|
1016 | 1015 | fl = web.repo.file(f) |
|
1017 | 1016 | numrevs = len(fl) |
|
1018 | 1017 | if not numrevs: # file doesn't exist at all |
|
1019 | 1018 | raise |
|
1020 | 1019 | rev = webutil.changectx(web.repo, web.req).rev() |
|
1021 | 1020 | first = fl.linkrev(0) |
|
1022 | 1021 | if rev < first: # current rev is from before file existed |
|
1023 | 1022 | raise |
|
1024 | 1023 | frev = numrevs - 1 |
|
1025 | 1024 | while fl.linkrev(frev) > rev: |
|
1026 | 1025 | frev -= 1 |
|
1027 | 1026 | fctx = web.repo.filectx(f, fl.linkrev(frev)) |
|
1028 | 1027 | |
|
1029 | 1028 | revcount = web.maxshortchanges |
|
1030 | 1029 | if 'revcount' in web.req.qsparams: |
|
1031 | 1030 | try: |
|
1032 | 1031 | revcount = int(web.req.qsparams.get('revcount', revcount)) |
|
1033 | 1032 | revcount = max(revcount, 1) |
|
1034 | 1033 | web.tmpl.defaults['sessionvars']['revcount'] = revcount |
|
1035 | 1034 | except ValueError: |
|
1036 | 1035 | pass |
|
1037 | 1036 | |
|
1038 | 1037 | lrange = webutil.linerange(web.req) |
|
1039 | 1038 | |
|
1040 | 1039 | lessvars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
1041 | 1040 | lessvars['revcount'] = max(revcount // 2, 1) |
|
1042 | 1041 | morevars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
1043 | 1042 | morevars['revcount'] = revcount * 2 |
|
1044 | 1043 | |
|
1045 | 1044 | patch = 'patch' in web.req.qsparams |
|
1046 | 1045 | if patch: |
|
1047 | 1046 | lessvars['patch'] = morevars['patch'] = web.req.qsparams['patch'] |
|
1048 | 1047 | descend = 'descend' in web.req.qsparams |
|
1049 | 1048 | if descend: |
|
1050 | 1049 | lessvars['descend'] = morevars['descend'] = web.req.qsparams['descend'] |
|
1051 | 1050 | |
|
1052 | 1051 | count = fctx.filerev() + 1 |
|
1053 | 1052 | start = max(0, count - revcount) # first rev on this page |
|
1054 | 1053 | end = min(count, start + revcount) # last rev on this page |
|
1055 | 1054 | parity = paritygen(web.stripecount, offset=start - end) |
|
1056 | 1055 | |
|
1057 | 1056 | repo = web.repo |
|
1058 | 1057 | revs = fctx.filelog().revs(start, end - 1) |
|
1059 | 1058 | entries = [] |
|
1060 | 1059 | |
|
1061 | 1060 | diffstyle = web.config('web', 'style') |
|
1062 | 1061 | if 'style' in web.req.qsparams: |
|
1063 | 1062 | diffstyle = web.req.qsparams['style'] |
|
1064 | 1063 | |
|
1065 | 1064 | def diff(fctx, linerange=None): |
|
1066 | 1065 | ctx = fctx.changectx() |
|
1067 | 1066 | basectx = ctx.p1() |
|
1068 | 1067 | path = fctx.path() |
|
1069 | 1068 | return webutil.diffs(web, ctx, basectx, [path], diffstyle, |
|
1070 | 1069 | linerange=linerange, |
|
1071 | 1070 | lineidprefix='%s-' % ctx.hex()[:12]) |
|
1072 | 1071 | |
|
1073 | 1072 | linerange = None |
|
1074 | 1073 | if lrange is not None: |
|
1075 | 1074 | linerange = webutil.formatlinerange(*lrange) |
|
1076 | 1075 | # deactivate numeric nav links when linerange is specified as this |
|
1077 | 1076 | # would require a dedicated "revnav" class
|
1078 | 1077 | nav = None |
|
1079 | 1078 | if descend: |
|
1080 | 1079 | it = dagop.blockdescendants(fctx, *lrange) |
|
1081 | 1080 | else: |
|
1082 | 1081 | it = dagop.blockancestors(fctx, *lrange) |
|
1083 | 1082 | for i, (c, lr) in enumerate(it, 1): |
|
1084 | 1083 | diffs = None |
|
1085 | 1084 | if patch: |
|
1086 | 1085 | diffs = diff(c, linerange=lr) |
|
1087 | 1086 | # follow renames across filtered (not in range) revisions
|
1088 | 1087 | path = c.path() |
|
1089 | 1088 | entries.append(dict( |
|
1090 | 1089 | parity=next(parity), |
|
1091 | 1090 | filerev=c.rev(), |
|
1092 | 1091 | file=path, |
|
1093 | 1092 | diff=diffs, |
|
1094 | 1093 | linerange=webutil.formatlinerange(*lr), |
|
1095 | 1094 | **pycompat.strkwargs(webutil.commonentry(repo, c)))) |
|
1096 | 1095 | if i == revcount: |
|
1097 | 1096 | break |
|
1098 | 1097 | lessvars['linerange'] = webutil.formatlinerange(*lrange) |
|
1099 | 1098 | morevars['linerange'] = lessvars['linerange'] |
|
1100 | 1099 | else: |
|
1101 | 1100 | for i in revs: |
|
1102 | 1101 | iterfctx = fctx.filectx(i) |
|
1103 | 1102 | diffs = None |
|
1104 | 1103 | if patch: |
|
1105 | 1104 | diffs = diff(iterfctx) |
|
1106 | 1105 | entries.append(dict( |
|
1107 | 1106 | parity=next(parity), |
|
1108 | 1107 | filerev=i, |
|
1109 | 1108 | file=f, |
|
1110 | 1109 | diff=diffs, |
|
1111 | 1110 | rename=webutil.renamelink(iterfctx), |
|
1112 | 1111 | **pycompat.strkwargs(webutil.commonentry(repo, iterfctx)))) |
|
1113 | 1112 | entries.reverse() |
|
1114 | 1113 | revnav = webutil.filerevnav(web.repo, fctx.path()) |
|
1115 | 1114 | nav = revnav.gen(end - 1, revcount, count) |
|
1116 | 1115 | |
|
1117 | 1116 | latestentry = entries[:1] |
|
1118 | 1117 | |
|
1119 | 1118 | return web.sendtemplate( |
|
1120 | 1119 | 'filelog', |
|
1121 | 1120 | file=f, |
|
1122 | 1121 | nav=nav, |
|
1123 | 1122 | symrev=webutil.symrevorshortnode(web.req, fctx), |
|
1124 | 1123 | entries=entries, |
|
1125 | 1124 | descend=descend, |
|
1126 | 1125 | patch=patch, |
|
1127 | 1126 | latestentry=latestentry, |
|
1128 | 1127 | linerange=linerange, |
|
1129 | 1128 | revcount=revcount, |
|
1130 | 1129 | morevars=morevars, |
|
1131 | 1130 | lessvars=lessvars, |
|
1132 | 1131 | **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))) |
|
1133 | 1132 | |
|
1134 | 1133 | @webcommand('archive') |
|
1135 | 1134 | def archive(web): |
|
1136 | 1135 | """ |
|
1137 | 1136 | /archive/{revision}.{format}[/{path}] |
|
1138 | 1137 | ------------------------------------- |
|
1139 | 1138 | |
|
1140 | 1139 | Obtain an archive of repository content. |
|
1141 | 1140 | |
|
1142 | 1141 | The content and type of the archive are defined by a URL path parameter.
|
1143 | 1142 | ``format`` is the file extension of the archive type to be generated. e.g. |
|
1144 | 1143 | ``zip`` or ``tar.bz2``. Not all archive types may be allowed by your |
|
1145 | 1144 | server configuration. |
|
1146 | 1145 | |
|
1147 | 1146 | The optional ``path`` URL parameter controls content to include in the |
|
1148 | 1147 | archive. If omitted, every file in the specified revision is present in the |
|
1149 | 1148 | archive. If included, only the specified file or contents of the specified |
|
1150 | 1149 | directory will be included in the archive. |
|
1151 | 1150 | |
|
1152 | 1151 | No template is used for this handler. Raw, binary content is generated. |
|
1153 | 1152 | """ |
|
1154 | 1153 | |
|
1155 | 1154 | type_ = web.req.qsparams.get('type') |
|
1156 | 1155 | allowed = web.configlist("web", "allow_archive") |
|
1157 | 1156 | key = web.req.qsparams['node'] |
|
1158 | 1157 | |
|
1159 | 1158 | if type_ not in web.archivespecs: |
|
1160 | 1159 | msg = 'Unsupported archive type: %s' % type_ |
|
1161 | 1160 | raise ErrorResponse(HTTP_NOT_FOUND, msg) |
|
1162 | 1161 | |
|
1163 | 1162 | if not ((type_ in allowed or |
|
1164 | 1163 | web.configbool("web", "allow" + type_))): |
|
1165 | 1164 | msg = 'Archive type not allowed: %s' % type_ |
|
1166 | 1165 | raise ErrorResponse(HTTP_FORBIDDEN, msg) |
|
1167 | 1166 | |
|
1168 | 1167 | reponame = re.sub(br"\W+", "-", os.path.basename(web.reponame)) |
|
1169 | 1168 | cnode = web.repo.lookup(key) |
|
1170 | 1169 | arch_version = key |
|
1171 | 1170 | if cnode == key or key == 'tip': |
|
1172 | 1171 | arch_version = short(cnode) |
|
1173 | 1172 | name = "%s-%s" % (reponame, arch_version) |
|
1174 | 1173 | |
|
1175 | 1174 | ctx = webutil.changectx(web.repo, web.req) |
|
1176 | 1175 | pats = [] |
|
1177 | 1176 | match = scmutil.match(ctx, []) |
|
1178 | 1177 | file = web.req.qsparams.get('file') |
|
1179 | 1178 | if file: |
|
1180 | 1179 | pats = ['path:' + file] |
|
1181 | 1180 | match = scmutil.match(ctx, pats, default='path') |
|
1182 | 1181 | if pats: |
|
1183 | 1182 | files = [f for f in ctx.manifest().keys() if match(f)] |
|
1184 | 1183 | if not files: |
|
1185 | 1184 | raise ErrorResponse(HTTP_NOT_FOUND, |
|
1186 | 1185 | 'file(s) not found: %s' % file) |
|
1187 | 1186 | |
|
1188 | 1187 | mimetype, artype, extension, encoding = web.archivespecs[type_] |
|
1189 | 1188 | |
|
1190 | 1189 | web.res.headers['Content-Type'] = mimetype |
|
1191 | 1190 | web.res.headers['Content-Disposition'] = 'attachment; filename=%s%s' % ( |
|
1192 | 1191 | name, extension) |
|
1193 | 1192 | |
|
1194 | 1193 | if encoding: |
|
1195 | 1194 | web.res.headers['Content-Encoding'] = encoding |
|
1196 | 1195 | |
|
1197 | 1196 | web.res.setbodywillwrite() |
|
1198 | 1197 | if list(web.res.sendresponse()): |
|
1199 | 1198 | raise error.ProgrammingError('sendresponse() should not emit data ' |
|
1200 | 1199 | 'if writing later') |
|
1201 | 1200 | |
|
1202 | 1201 | bodyfh = web.res.getbodyfile() |
|
1203 | 1202 | |
|
1204 | 1203 | archival.archive(web.repo, bodyfh, cnode, artype, prefix=name, |
|
1205 | 1204 | matchfn=match, |
|
1206 | 1205 | subrepos=web.configbool("web", "archivesubrepos")) |
|
1207 | 1206 | |
|
1208 | 1207 | return [] |
|
1209 | 1208 | |
|
1210 | 1209 | @webcommand('static') |
|
1211 | 1210 | def static(web): |
|
1212 | 1211 | fname = web.req.qsparams['file'] |
|
1213 | 1212 | # a repo owner may set web.static in .hg/hgrc to get any file |
|
1214 | 1213 | # readable by the user running the CGI script |
|
1215 | 1214 | static = web.config("web", "static", None, untrusted=False) |
|
1216 | 1215 | if not static: |
|
1217 | 1216 | tp = web.templatepath or templater.templatepaths() |
|
1218 | 1217 | if isinstance(tp, str): |
|
1219 | 1218 | tp = [tp] |
|
1220 | 1219 | static = [os.path.join(p, 'static') for p in tp] |
|
1221 | 1220 | |
|
1222 | 1221 | staticfile(static, fname, web.res) |
|
1223 | 1222 | return web.res.sendresponse() |
|
1224 | 1223 | |
|
1225 | 1224 | @webcommand('graph') |
|
1226 | 1225 | def graph(web): |
|
1227 | 1226 | """ |
|
1228 | 1227 | /graph[/{revision}] |
|
1229 | 1228 | ------------------- |
|
1230 | 1229 | |
|
1231 | 1230 | Show information about the graphical topology of the repository. |
|
1232 | 1231 | |
|
1233 | 1232 | Information rendered by this handler can be used to create visual |
|
1234 | 1233 | representations of repository topology. |
|
1235 | 1234 | |
|
1236 | 1235 | The ``revision`` URL parameter controls the starting changeset. If it's |
|
1237 | 1236 | absent, the default is ``tip``. |
|
1238 | 1237 | |
|
1239 | 1238 | The ``revcount`` query string argument can define the number of changesets |
|
1240 | 1239 | to show information for. |
|
1241 | 1240 | |
|
1242 | 1241 | The ``graphtop`` query string argument can specify the starting changeset |
|
1243 | 1242 | for producing the ``jsdata`` variable that is used for rendering the graph in
|
1244 | 1243 | JavaScript. By default it has the same value as ``revision``. |
|
1245 | 1244 | |
|
1246 | 1245 | This handler will render the ``graph`` template. |
|
1247 | 1246 | """ |
|
1248 | 1247 | |
|
1249 | 1248 | if 'node' in web.req.qsparams: |
|
1250 | 1249 | ctx = webutil.changectx(web.repo, web.req) |
|
1251 | 1250 | symrev = webutil.symrevorshortnode(web.req, ctx) |
|
1252 | 1251 | else: |
|
1253 | 1252 | ctx = web.repo['tip'] |
|
1254 | 1253 | symrev = 'tip' |
|
1255 | 1254 | rev = ctx.rev() |
|
1256 | 1255 | |
|
1257 | 1256 | bg_height = 39 |
|
1258 | 1257 | revcount = web.maxshortchanges |
|
1259 | 1258 | if 'revcount' in web.req.qsparams: |
|
1260 | 1259 | try: |
|
1261 | 1260 | revcount = int(web.req.qsparams.get('revcount', revcount)) |
|
1262 | 1261 | revcount = max(revcount, 1) |
|
1263 | 1262 | web.tmpl.defaults['sessionvars']['revcount'] = revcount |
|
1264 | 1263 | except ValueError: |
|
1265 | 1264 | pass |
|
1266 | 1265 | |
|
1267 | 1266 | lessvars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
1268 | 1267 | lessvars['revcount'] = max(revcount // 2, 1) |
|
1269 | 1268 | morevars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
1270 | 1269 | morevars['revcount'] = revcount * 2 |
|
1271 | 1270 | |
|
1272 | 1271 | graphtop = web.req.qsparams.get('graphtop', ctx.hex()) |
|
1273 | 1272 | graphvars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
1274 | 1273 | graphvars['graphtop'] = graphtop |
|
1275 | 1274 | |
|
1276 | 1275 | count = len(web.repo) |
|
1277 | 1276 | pos = rev |
|
1278 | 1277 | |
|
1279 | 1278 | uprev = min(max(0, count - 1), rev + revcount) |
|
1280 | 1279 | downrev = max(0, rev - revcount) |
|
1281 | 1280 | changenav = webutil.revnav(web.repo).gen(pos, revcount, count) |
|
1282 | 1281 | |
|
1283 | 1282 | tree = [] |
|
1284 | 1283 | nextentry = [] |
|
1285 | 1284 | lastrev = 0 |
|
1286 | 1285 | if pos != -1: |
|
1287 | 1286 | allrevs = web.repo.changelog.revs(pos, 0) |
|
1288 | 1287 | revs = [] |
|
1289 | 1288 | for i in allrevs: |
|
1290 | 1289 | revs.append(i) |
|
1291 | 1290 | if len(revs) >= revcount + 1: |
|
1292 | 1291 | break |
|
1293 | 1292 | |
|
1294 | 1293 | if len(revs) > revcount: |
|
1295 | 1294 | nextentry = [webutil.commonentry(web.repo, web.repo[revs[-1]])] |
|
1296 | 1295 | revs = revs[:-1] |
|
1297 | 1296 | |
|
1298 | 1297 | lastrev = revs[-1] |
|
1299 | 1298 | |
|
1300 | 1299 | # We have to feed a baseset to dagwalker as it is expecting smartset |
|
1301 | 1300 | # object. This does not have a big impact on hgweb performance itself |
|
1302 | 1301 | # since hgweb graphing code is not itself lazy yet. |
|
1303 | 1302 | dag = graphmod.dagwalker(web.repo, smartset.baseset(revs)) |
|
1304 | 1303 | # As we said one line above... not lazy. |
|
1305 | 1304 | tree = list(item for item in graphmod.colored(dag, web.repo) |
|
1306 | 1305 | if item[1] == graphmod.CHANGESET) |
|
1307 | 1306 | |
|
1308 | 1307 | def nodecurrent(ctx): |
|
1309 | 1308 | wpnodes = web.repo.dirstate.parents() |
|
1310 | 1309 | if wpnodes[1] == nullid: |
|
1311 | 1310 | wpnodes = wpnodes[:1] |
|
1312 | 1311 | if ctx.node() in wpnodes: |
|
1313 | 1312 | return '@' |
|
1314 | 1313 | return '' |
|
1315 | 1314 | |
|
1316 | 1315 | def nodesymbol(ctx): |
|
1317 | 1316 | if ctx.obsolete(): |
|
1318 | 1317 | return 'x' |
|
1319 | 1318 | elif ctx.isunstable(): |
|
1320 | 1319 | return '*' |
|
1321 | 1320 | elif ctx.closesbranch(): |
|
1322 | 1321 | return '_' |
|
1323 | 1322 | else: |
|
1324 | 1323 | return 'o' |
|
1325 | 1324 | |
|
1326 | 1325 | def fulltree(): |
|
1327 | 1326 | pos = web.repo[graphtop].rev() |
|
1328 | 1327 | tree = [] |
|
1329 | 1328 | if pos != -1: |
|
1330 | 1329 | revs = web.repo.changelog.revs(pos, lastrev) |
|
1331 | 1330 | dag = graphmod.dagwalker(web.repo, smartset.baseset(revs)) |
|
1332 | 1331 | tree = list(item for item in graphmod.colored(dag, web.repo) |
|
1333 | 1332 | if item[1] == graphmod.CHANGESET) |
|
1334 | 1333 | return tree |
|
1335 | 1334 | |
|
1336 | 1335 | def jsdata(): |
|
1337 | 1336 | return [{'node': pycompat.bytestr(ctx), |
|
1338 | 1337 | 'graphnode': nodecurrent(ctx) + nodesymbol(ctx), |
|
1339 | 1338 | 'vertex': vtx, |
|
1340 | 1339 | 'edges': edges} |
|
1341 | 1340 | for (id, type, ctx, vtx, edges) in fulltree()] |
|
1342 | 1341 | |
|
1343 | 1342 | def nodes(): |
|
1344 | 1343 | parity = paritygen(web.stripecount) |
|
1345 | 1344 | for row, (id, type, ctx, vtx, edges) in enumerate(tree): |
|
1346 | 1345 | entry = webutil.commonentry(web.repo, ctx) |
|
1347 | 1346 | edgedata = [{'col': edge[0], |
|
1348 | 1347 | 'nextcol': edge[1], |
|
1349 | 1348 | 'color': (edge[2] - 1) % 6 + 1, |
|
1350 | 1349 | 'width': edge[3], |
|
1351 | 1350 | 'bcolor': edge[4]} |
|
1352 | 1351 | for edge in edges] |
|
1353 | 1352 | |
|
1354 | 1353 | entry.update({'col': vtx[0], |
|
1355 | 1354 | 'color': (vtx[1] - 1) % 6 + 1, |
|
1356 | 1355 | 'parity': next(parity), |
|
1357 | 1356 | 'edges': edgedata, |
|
1358 | 1357 | 'row': row, |
|
1359 | 1358 | 'nextrow': row + 1}) |
|
1360 | 1359 | |
|
1361 | 1360 | yield entry |
|
1362 | 1361 | |
|
1363 | 1362 | rows = len(tree) |
|
1364 | 1363 | |
|
1365 | 1364 | return web.sendtemplate( |
|
1366 | 1365 | 'graph', |
|
1367 | 1366 | rev=rev, |
|
1368 | 1367 | symrev=symrev, |
|
1369 | 1368 | revcount=revcount, |
|
1370 | 1369 | uprev=uprev, |
|
1371 | 1370 | lessvars=lessvars, |
|
1372 | 1371 | morevars=morevars, |
|
1373 | 1372 | downrev=downrev, |
|
1374 | 1373 | graphvars=graphvars, |
|
1375 | 1374 | rows=rows, |
|
1376 | 1375 | bg_height=bg_height, |
|
1377 | 1376 | changesets=count, |
|
1378 | 1377 | nextentry=nextentry, |
|
1379 | 1378 | jsdata=lambda **x: jsdata(), |
|
1380 | 1379 | nodes=lambda **x: nodes(), |
|
1381 | 1380 | node=ctx.hex(), |
|
1382 | 1381 | changenav=changenav) |
|
1383 | 1382 | |
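For orientation, a hedged sketch of one record emitted by ``jsdata()`` above for the JavaScript graph renderer; all concrete values below are invented for illustration:

# Sketch only: shape of a single jsdata() entry, as built in graph() above.
sample = {
    'node': '1d22e65f027e',        # pycompat.bytestr(ctx), the short changeset id
    'graphnode': '@o',             # nodecurrent(ctx) + nodesymbol(ctx)
    'vertex': (0, 1),              # column and color of this changeset's vertex
    'edges': [(0, 0, 1, 1, None)], # per-edge (col, nextcol, color, width, bcolor)
}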
|
1384 | 1383 | def _getdoc(e): |
|
1385 | 1384 | doc = e[0].__doc__ |
|
1386 | 1385 | if doc: |
|
1387 | 1386 | doc = _(doc).partition('\n')[0] |
|
1388 | 1387 | else: |
|
1389 | 1388 | doc = _('(no help text available)') |
|
1390 | 1389 | return doc |
|
1391 | 1390 | |
|
1392 | 1391 | @webcommand('help') |
|
1393 | 1392 | def help(web): |
|
1394 | 1393 | """ |
|
1395 | 1394 | /help[/{topic}] |
|
1396 | 1395 | --------------- |
|
1397 | 1396 | |
|
1398 | 1397 | Render help documentation. |
|
1399 | 1398 | |
|
1400 | 1399 | This web command is roughly equivalent to :hg:`help`. If a ``topic`` |
|
1401 | 1400 | is defined, that help topic will be rendered. If not, an index of |
|
1402 | 1401 | available help topics will be rendered. |
|
1403 | 1402 | |
|
1404 | 1403 | The ``help`` template will be rendered when requesting help for a topic. |
|
1405 | 1404 | ``helptopics`` will be rendered for the index of help topics. |
|
1406 | 1405 | """ |
|
1407 | 1406 | from .. import commands, help as helpmod # avoid cycle |
|
1408 | 1407 | |
|
1409 | 1408 | topicname = web.req.qsparams.get('node') |
|
1410 | 1409 | if not topicname: |
|
1411 | 1410 | def topics(**map): |
|
1412 | 1411 | for entries, summary, _doc in helpmod.helptable: |
|
1413 | 1412 | yield {'topic': entries[0], 'summary': summary} |
|
1414 | 1413 | |
|
1415 | 1414 | early, other = [], [] |
|
1416 | 1415 | primary = lambda s: s.partition('|')[0] |
|
1417 | 1416 | for c, e in commands.table.iteritems(): |
|
1418 | 1417 | doc = _getdoc(e) |
|
1419 | 1418 | if 'DEPRECATED' in doc or c.startswith('debug'): |
|
1420 | 1419 | continue |
|
1421 | 1420 | cmd = primary(c) |
|
1422 | 1421 | if cmd.startswith('^'): |
|
1423 | 1422 | early.append((cmd[1:], doc)) |
|
1424 | 1423 | else: |
|
1425 | 1424 | other.append((cmd, doc)) |
|
1426 | 1425 | |
|
1427 | 1426 | early.sort() |
|
1428 | 1427 | other.sort() |
|
1429 | 1428 | |
|
1430 | 1429 | def earlycommands(**map): |
|
1431 | 1430 | for c, doc in early: |
|
1432 | 1431 | yield {'topic': c, 'summary': doc} |
|
1433 | 1432 | |
|
1434 | 1433 | def othercommands(**map): |
|
1435 | 1434 | for c, doc in other: |
|
1436 | 1435 | yield {'topic': c, 'summary': doc} |
|
1437 | 1436 | |
|
1438 | 1437 | return web.sendtemplate( |
|
1439 | 1438 | 'helptopics', |
|
1440 | 1439 | topics=topics, |
|
1441 | 1440 | earlycommands=earlycommands, |
|
1442 | 1441 | othercommands=othercommands, |
|
1443 | 1442 | title='Index') |
|
1444 | 1443 | |
|
1445 | 1444 | # Render an index of sub-topics. |
|
1446 | 1445 | if topicname in helpmod.subtopics: |
|
1447 | 1446 | topics = [] |
|
1448 | 1447 | for entries, summary, _doc in helpmod.subtopics[topicname]: |
|
1449 | 1448 | topics.append({ |
|
1450 | 1449 | 'topic': '%s.%s' % (topicname, entries[0]), |
|
1451 | 1450 | 'basename': entries[0], |
|
1452 | 1451 | 'summary': summary, |
|
1453 | 1452 | }) |
|
1454 | 1453 | |
|
1455 | 1454 | return web.sendtemplate( |
|
1456 | 1455 | 'helptopics', |
|
1457 | 1456 | topics=topics, |
|
1458 | 1457 | title=topicname, |
|
1459 | 1458 | subindex=True) |
|
1460 | 1459 | |
|
1461 | 1460 | u = webutil.wsgiui.load() |
|
1462 | 1461 | u.verbose = True |
|
1463 | 1462 | |
|
1464 | 1463 | # Render a page from a sub-topic. |
|
1465 | 1464 | if '.' in topicname: |
|
1466 | 1465 | # TODO implement support for rendering sections, like |
|
1467 | 1466 | # `hg help` works. |
|
1468 | 1467 | topic, subtopic = topicname.split('.', 1) |
|
1469 | 1468 | if topic not in helpmod.subtopics: |
|
1470 | 1469 | raise ErrorResponse(HTTP_NOT_FOUND) |
|
1471 | 1470 | else: |
|
1472 | 1471 | topic = topicname |
|
1473 | 1472 | subtopic = None |
|
1474 | 1473 | |
|
1475 | 1474 | try: |
|
1476 | 1475 | doc = helpmod.help_(u, commands, topic, subtopic=subtopic) |
|
1477 | 1476 | except error.Abort: |
|
1478 | 1477 | raise ErrorResponse(HTTP_NOT_FOUND) |
|
1479 | 1478 | |
|
1480 | 1479 | return web.sendtemplate( |
|
1481 | 1480 | 'help', |
|
1482 | 1481 | topic=topicname, |
|
1483 | 1482 | doc=doc) |
|
1484 | 1483 | |
|
1485 | 1484 | # tell hggettext to extract docstrings from these functions: |
|
1486 | 1485 | i18nfunctions = commands.values() |
@@ -1,680 +1,700 b'' | |||
|
1 | 1 | # hgweb/webutil.py - utility library for the web interface. |
|
2 | 2 | # |
|
3 | 3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
4 | 4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
5 | 5 | # |
|
6 | 6 | # This software may be used and distributed according to the terms of the |
|
7 | 7 | # GNU General Public License version 2 or any later version. |
|
8 | 8 | |
|
9 | 9 | from __future__ import absolute_import |
|
10 | 10 | |
|
11 | 11 | import copy |
|
12 | 12 | import difflib |
|
13 | 13 | import os |
|
14 | 14 | import re |
|
15 | 15 | |
|
16 | 16 | from ..i18n import _ |
|
17 | 17 | from ..node import hex, nullid, short |
|
18 | 18 | |
|
19 | 19 | from .common import ( |
|
20 | 20 | ErrorResponse, |
|
21 | 21 | HTTP_BAD_REQUEST, |
|
22 | 22 | HTTP_NOT_FOUND, |
|
23 | 23 | paritygen, |
|
24 | 24 | ) |
|
25 | 25 | |
|
26 | 26 | from .. import ( |
|
27 | 27 | context, |
|
28 | 28 | error, |
|
29 | 29 | match, |
|
30 | 30 | mdiff, |
|
31 | 31 | obsutil, |
|
32 | 32 | patch, |
|
33 | 33 | pathutil, |
|
34 | 34 | pycompat, |
|
35 | 35 | templatefilters, |
|
36 | 36 | templatekw, |
|
37 | 37 | ui as uimod, |
|
38 | 38 | util, |
|
39 | 39 | ) |
|
40 | 40 | |
|
41 | 41 | def up(p): |
|
42 | 42 | if p[0:1] != "/": |
|
43 | 43 | p = "/" + p |
|
44 | 44 | if p[-1:] == "/": |
|
45 | 45 | p = p[:-1] |
|
46 | 46 | up = os.path.dirname(p) |
|
47 | 47 | if up == "/": |
|
48 | 48 | return "/" |
|
49 | 49 | return up + "/" |
|
50 | 50 | |
|
51 | 51 | def _navseq(step, firststep=None): |
|
52 | 52 | if firststep: |
|
53 | 53 | yield firststep |
|
54 | 54 | if firststep >= 20 and firststep <= 40: |
|
55 | 55 | firststep = 50 |
|
56 | 56 | yield firststep |
|
57 | 57 | assert step > 0 |
|
58 | 58 | assert firststep > 0 |
|
59 | 59 | while step <= firststep: |
|
60 | 60 | step *= 10 |
|
61 | 61 | while True: |
|
62 | 62 | yield 1 * step |
|
63 | 63 | yield 3 * step |
|
64 | 64 | step *= 10 |
|
65 | 65 | |
|
66 | 66 | class revnav(object): |
|
67 | 67 | |
|
68 | 68 | def __init__(self, repo): |
|
69 | 69 | """Navigation generation object |
|
70 | 70 | |
|
71 | 71 | :repo: repo object we generate nav for |
|
72 | 72 | """ |
|
73 | 73 | # used for hex generation |
|
74 | 74 | self._revlog = repo.changelog |
|
75 | 75 | |
|
76 | 76 | def __nonzero__(self): |
|
77 | 77 | """return True if any revision to navigate over""" |
|
78 | 78 | return self._first() is not None |
|
79 | 79 | |
|
80 | 80 | __bool__ = __nonzero__ |
|
81 | 81 | |
|
82 | 82 | def _first(self): |
|
83 | 83 | """return the minimum non-filtered changeset or None""" |
|
84 | 84 | try: |
|
85 | 85 | return next(iter(self._revlog)) |
|
86 | 86 | except StopIteration: |
|
87 | 87 | return None |
|
88 | 88 | |
|
89 | 89 | def hex(self, rev): |
|
90 | 90 | return hex(self._revlog.node(rev)) |
|
91 | 91 | |
|
92 | 92 | def gen(self, pos, pagelen, limit): |
|
93 | 93 | """computes label and revision id for navigation link |
|
94 | 94 | |
|
95 | 95 | :pos: is the revision relative to which we generate navigation. |
|
96 | 96 | :pagelen: the size of each navigation page |
|
97 | 97 | :limit: how far shall we link |
|
98 | 98 | |
|
99 | 99 | The return is: |
|
100 | 100 | - a single element tuple |
|
101 | 101 | - containing a dictionary with a `before` and `after` key |
|
102 | 102 | - values are generator functions taking arbitrary number of kwargs |
|
103 | 103 | - yield items are dictionaries with `label` and `node` keys |
|
104 | 104 | """ |
|
105 | 105 | if not self: |
|
106 | 106 | # empty repo |
|
107 | 107 | return ({'before': (), 'after': ()},) |
|
108 | 108 | |
|
109 | 109 | targets = [] |
|
110 | 110 | for f in _navseq(1, pagelen): |
|
111 | 111 | if f > limit: |
|
112 | 112 | break |
|
113 | 113 | targets.append(pos + f) |
|
114 | 114 | targets.append(pos - f) |
|
115 | 115 | targets.sort() |
|
116 | 116 | |
|
117 | 117 | first = self._first() |
|
118 | 118 | navbefore = [("(%i)" % first, self.hex(first))] |
|
119 | 119 | navafter = [] |
|
120 | 120 | for rev in targets: |
|
121 | 121 | if rev not in self._revlog: |
|
122 | 122 | continue |
|
123 | 123 | if pos < rev < limit: |
|
124 | 124 | navafter.append(("+%d" % abs(rev - pos), self.hex(rev))) |
|
125 | 125 | if 0 < rev < pos: |
|
126 | 126 | navbefore.append(("-%d" % abs(rev - pos), self.hex(rev))) |
|
127 | 127 | |
|
128 | 128 | |
|
129 | 129 | navafter.append(("tip", "tip")) |
|
130 | 130 | |
|
131 | 131 | data = lambda i: {"label": i[0], "node": i[1]} |
|
132 | 132 | return ({'before': lambda **map: (data(i) for i in navbefore), |
|
133 | 133 | 'after': lambda **map: (data(i) for i in navafter)},) |
|
134 | 134 | |
|
135 | 135 | class filerevnav(revnav): |
|
136 | 136 | |
|
137 | 137 | def __init__(self, repo, path): |
|
138 | 138 | """Navigation generation object |
|
139 | 139 | |
|
140 | 140 | :repo: repo object we generate nav for |
|
141 | 141 | :path: path of the file we generate nav for |
|
142 | 142 | """ |
|
143 | 143 | # used for iteration |
|
144 | 144 | self._changelog = repo.unfiltered().changelog |
|
145 | 145 | # used for hex generation |
|
146 | 146 | self._revlog = repo.file(path) |
|
147 | 147 | |
|
148 | 148 | def hex(self, rev): |
|
149 | 149 | return hex(self._changelog.node(self._revlog.linkrev(rev))) |
|
150 | 150 | |
|
151 | 151 | class _siblings(object): |
|
152 | 152 | def __init__(self, siblings=None, hiderev=None): |
|
153 | 153 | if siblings is None: |
|
154 | 154 | siblings = [] |
|
155 | 155 | self.siblings = [s for s in siblings if s.node() != nullid] |
|
156 | 156 | if len(self.siblings) == 1 and self.siblings[0].rev() == hiderev: |
|
157 | 157 | self.siblings = [] |
|
158 | 158 | |
|
159 | 159 | def __iter__(self): |
|
160 | 160 | for s in self.siblings: |
|
161 | 161 | d = { |
|
162 | 162 | 'node': s.hex(), |
|
163 | 163 | 'rev': s.rev(), |
|
164 | 164 | 'user': s.user(), |
|
165 | 165 | 'date': s.date(), |
|
166 | 166 | 'description': s.description(), |
|
167 | 167 | 'branch': s.branch(), |
|
168 | 168 | } |
|
169 | 169 | if util.safehasattr(s, 'path'): |
|
170 | 170 | d['file'] = s.path() |
|
171 | 171 | yield d |
|
172 | 172 | |
|
173 | 173 | def __len__(self): |
|
174 | 174 | return len(self.siblings) |
|
175 | 175 | |
|
176 | 176 | def difffeatureopts(req, ui, section): |
|
177 | 177 | diffopts = patch.difffeatureopts(ui, untrusted=True, |
|
178 | 178 | section=section, whitespace=True) |
|
179 | 179 | |
|
180 | 180 | for k in ('ignorews', 'ignorewsamount', 'ignorewseol', 'ignoreblanklines'): |
|
181 | 181 | v = req.qsparams.get(k) |
|
182 | 182 | if v is not None: |
|
183 | 183 | v = util.parsebool(v) |
|
184 | 184 | setattr(diffopts, k, v if v is not None else True) |
|
185 | 185 | |
|
186 | 186 | return diffopts |
|
187 | 187 | |
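A small sketch (assuming a working ``mercurial`` installation) of the boolean handling used by ``difffeatureopts()`` above: ``util.parsebool`` yields True, False, or None, and an unparseable value falls back to True:

# Sketch only: mirrors the query-string parsing in difffeatureopts() above.
from mercurial import util

for raw in (b'1', b'true', b'0', b'false', b'garbage'):
    v = util.parsebool(raw)
    effective = v if v is not None else True
    print(raw, effective)   # b'0'/b'false' -> False; b'garbage' -> True (fallback)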
|
188 | 188 | def annotate(req, fctx, ui): |
|
189 | 189 | diffopts = difffeatureopts(req, ui, 'annotate') |
|
190 | 190 | return fctx.annotate(follow=True, linenumber=True, diffopts=diffopts) |
|
191 | 191 | |
|
192 | 192 | def parents(ctx, hide=None): |
|
193 | 193 | if isinstance(ctx, context.basefilectx): |
|
194 | 194 | introrev = ctx.introrev() |
|
195 | 195 | if ctx.changectx().rev() != introrev: |
|
196 | 196 | return _siblings([ctx.repo()[introrev]], hide) |
|
197 | 197 | return _siblings(ctx.parents(), hide) |
|
198 | 198 | |
|
199 | 199 | def children(ctx, hide=None): |
|
200 | 200 | return _siblings(ctx.children(), hide) |
|
201 | 201 | |
|
202 | 202 | def renamelink(fctx): |
|
203 | 203 | r = fctx.renamed() |
|
204 | 204 | if r: |
|
205 | 205 | return [{'file': r[0], 'node': hex(r[1])}] |
|
206 | 206 | return [] |
|
207 | 207 | |
|
208 | 208 | def nodetagsdict(repo, node): |
|
209 | 209 | return [{"name": i} for i in repo.nodetags(node)] |
|
210 | 210 | |
|
211 | 211 | def nodebookmarksdict(repo, node): |
|
212 | 212 | return [{"name": i} for i in repo.nodebookmarks(node)] |
|
213 | 213 | |
|
214 | 214 | def nodebranchdict(repo, ctx): |
|
215 | 215 | branches = [] |
|
216 | 216 | branch = ctx.branch() |
|
217 | 217 | # If this is an empty repo, ctx.node() == nullid, |
|
218 | 218 | # ctx.branch() == 'default'. |
|
219 | 219 | try: |
|
220 | 220 | branchnode = repo.branchtip(branch) |
|
221 | 221 | except error.RepoLookupError: |
|
222 | 222 | branchnode = None |
|
223 | 223 | if branchnode == ctx.node(): |
|
224 | 224 | branches.append({"name": branch}) |
|
225 | 225 | return branches |
|
226 | 226 | |
|
227 | 227 | def nodeinbranch(repo, ctx): |
|
228 | 228 | branches = [] |
|
229 | 229 | branch = ctx.branch() |
|
230 | 230 | try: |
|
231 | 231 | branchnode = repo.branchtip(branch) |
|
232 | 232 | except error.RepoLookupError: |
|
233 | 233 | branchnode = None |
|
234 | 234 | if branch != 'default' and branchnode != ctx.node(): |
|
235 | 235 | branches.append({"name": branch}) |
|
236 | 236 | return branches |
|
237 | 237 | |
|
238 | 238 | def nodebranchnodefault(ctx): |
|
239 | 239 | branches = [] |
|
240 | 240 | branch = ctx.branch() |
|
241 | 241 | if branch != 'default': |
|
242 | 242 | branches.append({"name": branch}) |
|
243 | 243 | return branches |
|
244 | 244 | |
|
245 | 245 | def showtag(repo, tmpl, t1, node=nullid, **args): |
|
246 | args = pycompat.byteskwargs(args) | |
|
246 | 247 | for t in repo.nodetags(node): |
|
247 | yield tmpl(t1, tag=t, **args) | |
|
248 | lm = args.copy() | |
|
249 | lm['tag'] = t | |
|
250 | yield tmpl.generate(t1, lm) | |
|
248 | 251 | |
|
249 | 252 | def showbookmark(repo, tmpl, t1, node=nullid, **args): |
|
253 | args = pycompat.byteskwargs(args) | |
|
250 | 254 | for t in repo.nodebookmarks(node): |
|
251 | yield tmpl(t1, bookmark=t, **args) | |
|
255 | lm = args.copy() | |
|
256 | lm['bookmark'] = t | |
|
257 | yield tmpl.generate(t1, lm) | |
|
252 | 258 | |
|
253 | 259 | def branchentries(repo, stripecount, limit=0): |
|
254 | 260 | tips = [] |
|
255 | 261 | heads = repo.heads() |
|
256 | 262 | parity = paritygen(stripecount) |
|
257 | 263 | sortkey = lambda item: (not item[1], item[0].rev()) |
|
258 | 264 | |
|
259 | 265 | def entries(**map): |
|
260 | 266 | count = 0 |
|
261 | 267 | if not tips: |
|
262 | 268 | for tag, hs, tip, closed in repo.branchmap().iterbranches(): |
|
263 | 269 | tips.append((repo[tip], closed)) |
|
264 | 270 | for ctx, closed in sorted(tips, key=sortkey, reverse=True): |
|
265 | 271 | if limit > 0 and count >= limit: |
|
266 | 272 | return |
|
267 | 273 | count += 1 |
|
268 | 274 | if closed: |
|
269 | 275 | status = 'closed' |
|
270 | 276 | elif ctx.node() not in heads: |
|
271 | 277 | status = 'inactive' |
|
272 | 278 | else: |
|
273 | 279 | status = 'open' |
|
274 | 280 | yield { |
|
275 | 281 | 'parity': next(parity), |
|
276 | 282 | 'branch': ctx.branch(), |
|
277 | 283 | 'status': status, |
|
278 | 284 | 'node': ctx.hex(), |
|
279 | 285 | 'date': ctx.date() |
|
280 | 286 | } |
|
281 | 287 | |
|
282 | 288 | return entries |
|
283 | 289 | |
|
284 | 290 | def cleanpath(repo, path): |
|
285 | 291 | path = path.lstrip('/') |
|
286 | 292 | return pathutil.canonpath(repo.root, '', path) |
|
287 | 293 | |
|
288 | 294 | def changeidctx(repo, changeid): |
|
289 | 295 | try: |
|
290 | 296 | ctx = repo[changeid] |
|
291 | 297 | except error.RepoError: |
|
292 | 298 | man = repo.manifestlog._revlog |
|
293 | 299 | ctx = repo[man.linkrev(man.rev(man.lookup(changeid)))] |
|
294 | 300 | |
|
295 | 301 | return ctx |
|
296 | 302 | |
|
297 | 303 | def changectx(repo, req): |
|
298 | 304 | changeid = "tip" |
|
299 | 305 | if 'node' in req.qsparams: |
|
300 | 306 | changeid = req.qsparams['node'] |
|
301 | 307 | ipos = changeid.find(':') |
|
302 | 308 | if ipos != -1: |
|
303 | 309 | changeid = changeid[(ipos + 1):] |
|
304 | 310 | elif 'manifest' in req.qsparams: |
|
305 | 311 | changeid = req.qsparams['manifest'] |
|
306 | 312 | |
|
307 | 313 | return changeidctx(repo, changeid) |
|
308 | 314 | |
|
309 | 315 | def basechangectx(repo, req): |
|
310 | 316 | if 'node' in req.qsparams: |
|
311 | 317 | changeid = req.qsparams['node'] |
|
312 | 318 | ipos = changeid.find(':') |
|
313 | 319 | if ipos != -1: |
|
314 | 320 | changeid = changeid[:ipos] |
|
315 | 321 | return changeidctx(repo, changeid) |
|
316 | 322 | |
|
317 | 323 | return None |
|
318 | 324 | |
|
319 | 325 | def filectx(repo, req): |
|
320 | 326 | if 'file' not in req.qsparams: |
|
321 | 327 | raise ErrorResponse(HTTP_NOT_FOUND, 'file not given') |
|
322 | 328 | path = cleanpath(repo, req.qsparams['file']) |
|
323 | 329 | if 'node' in req.qsparams: |
|
324 | 330 | changeid = req.qsparams['node'] |
|
325 | 331 | elif 'filenode' in req.qsparams: |
|
326 | 332 | changeid = req.qsparams['filenode'] |
|
327 | 333 | else: |
|
328 | 334 | raise ErrorResponse(HTTP_NOT_FOUND, 'node or filenode not given') |
|
329 | 335 | try: |
|
330 | 336 | fctx = repo[changeid][path] |
|
331 | 337 | except error.RepoError: |
|
332 | 338 | fctx = repo.filectx(path, fileid=changeid) |
|
333 | 339 | |
|
334 | 340 | return fctx |
|
335 | 341 | |
|
336 | 342 | def linerange(req): |
|
337 | 343 | linerange = req.qsparams.getall('linerange') |
|
338 | 344 | if not linerange: |
|
339 | 345 | return None |
|
340 | 346 | if len(linerange) > 1: |
|
341 | 347 | raise ErrorResponse(HTTP_BAD_REQUEST, |
|
342 | 348 | 'redundant linerange parameter') |
|
343 | 349 | try: |
|
344 | 350 | fromline, toline = map(int, linerange[0].split(':', 1)) |
|
345 | 351 | except ValueError: |
|
346 | 352 | raise ErrorResponse(HTTP_BAD_REQUEST, |
|
347 | 353 | 'invalid linerange parameter') |
|
348 | 354 | try: |
|
349 | 355 | return util.processlinerange(fromline, toline) |
|
350 | 356 | except error.ParseError as exc: |
|
351 | 357 | raise ErrorResponse(HTTP_BAD_REQUEST, pycompat.bytestr(exc)) |
|
352 | 358 | |
|
353 | 359 | def formatlinerange(fromline, toline): |
|
354 | 360 | return '%d:%d' % (fromline + 1, toline) |
|
355 | 361 | |
|
356 | 362 | def succsandmarkers(context, mapping): |
|
357 | 363 | repo = context.resource(mapping, 'repo') |
|
358 | 364 | for item in templatekw.showsuccsandmarkers(context, mapping): |
|
359 | 365 | item['successors'] = _siblings(repo[successor] |
|
360 | 366 | for successor in item['successors']) |
|
361 | 367 | yield item |
|
362 | 368 | |
|
363 | 369 | # teach templater succsandmarkers is switched to (context, mapping) API |
|
364 | 370 | succsandmarkers._requires = {'repo', 'ctx', 'templ'} |
|
365 | 371 | |
|
366 | 372 | def whyunstable(context, mapping): |
|
367 | 373 | repo = context.resource(mapping, 'repo') |
|
368 | 374 | ctx = context.resource(mapping, 'ctx') |
|
369 | 375 | |
|
370 | 376 | entries = obsutil.whyunstable(repo, ctx) |
|
371 | 377 | for entry in entries: |
|
372 | 378 | if entry.get('divergentnodes'): |
|
373 | 379 | entry['divergentnodes'] = _siblings(entry['divergentnodes']) |
|
374 | 380 | yield entry |
|
375 | 381 | |
|
376 | 382 | whyunstable._requires = {'repo', 'ctx', 'templ'} |
|
377 | 383 | |
|
378 | 384 | def commonentry(repo, ctx): |
|
379 | 385 | node = ctx.node() |
|
380 | 386 | return { |
|
381 | 387 | # TODO: perhaps ctx.changectx() should be assigned if ctx is a |
|
382 | 388 | # filectx, but I'm not entirely sure if that would always work because |
|
383 | 389 | # fctx.parents() != fctx.changectx.parents() for example. |
|
384 | 390 | 'ctx': ctx, |
|
385 | 391 | 'revcache': {}, |
|
386 | 392 | 'rev': ctx.rev(), |
|
387 | 393 | 'node': hex(node), |
|
388 | 394 | 'author': ctx.user(), |
|
389 | 395 | 'desc': ctx.description(), |
|
390 | 396 | 'date': ctx.date(), |
|
391 | 397 | 'extra': ctx.extra(), |
|
392 | 398 | 'phase': ctx.phasestr(), |
|
393 | 399 | 'obsolete': ctx.obsolete(), |
|
394 | 400 | 'succsandmarkers': succsandmarkers, |
|
395 | 401 | 'instabilities': [{"instability": i} for i in ctx.instabilities()], |
|
396 | 402 | 'whyunstable': whyunstable, |
|
397 | 403 | 'branch': nodebranchnodefault(ctx), |
|
398 | 404 | 'inbranch': nodeinbranch(repo, ctx), |
|
399 | 405 | 'branches': nodebranchdict(repo, ctx), |
|
400 | 406 | 'tags': nodetagsdict(repo, node), |
|
401 | 407 | 'bookmarks': nodebookmarksdict(repo, node), |
|
402 | 408 | 'parent': lambda **x: parents(ctx), |
|
403 | 409 | 'child': lambda **x: children(ctx), |
|
404 | 410 | } |
|
405 | 411 | |
|
406 | 412 | def changelistentry(web, ctx): |
|
407 | 413 | '''Obtain a dictionary to be used for entries in a changelist. |
|
408 | 414 | |
|
409 | 415 | This function is called when producing items for the "entries" list passed |
|
410 | 416 | to the "shortlog" and "changelog" templates. |
|
411 | 417 | ''' |
|
412 | 418 | repo = web.repo |
|
413 | 419 | rev = ctx.rev() |
|
414 | 420 | n = ctx.node() |
|
415 | 421 | showtags = showtag(repo, web.tmpl, 'changelogtag', n) |
|
416 | 422 | files = listfilediffs(web.tmpl, ctx.files(), n, web.maxfiles) |
|
417 | 423 | |
|
418 | 424 | entry = commonentry(repo, ctx) |
|
419 | 425 | entry.update( |
|
420 | 426 | allparents=lambda **x: parents(ctx), |
|
421 | 427 | parent=lambda **x: parents(ctx, rev - 1), |
|
422 | 428 | child=lambda **x: children(ctx, rev + 1), |
|
423 | 429 | changelogtag=showtags, |
|
424 | 430 | files=files, |
|
425 | 431 | ) |
|
426 | 432 | return entry |
|
427 | 433 | |
|
428 | 434 | def symrevorshortnode(req, ctx): |
|
429 | 435 | if 'node' in req.qsparams: |
|
430 | 436 | return templatefilters.revescape(req.qsparams['node']) |
|
431 | 437 | else: |
|
432 | 438 | return short(ctx.node()) |
|
433 | 439 | |
|
434 | 440 | def changesetentry(web, ctx): |
|
435 | 441 | '''Obtain a dictionary to be used to render the "changeset" template.''' |
|
436 | 442 | |
|
437 | 443 | showtags = showtag(web.repo, web.tmpl, 'changesettag', ctx.node()) |
|
438 | 444 | showbookmarks = showbookmark(web.repo, web.tmpl, 'changesetbookmark', |
|
439 | 445 | ctx.node()) |
|
440 | 446 | showbranch = nodebranchnodefault(ctx) |
|
441 | 447 | |
|
442 | 448 | files = [] |
|
443 | 449 | parity = paritygen(web.stripecount) |
|
444 | 450 | for blockno, f in enumerate(ctx.files()): |
|
445 | 451 | template = 'filenodelink' if f in ctx else 'filenolink' |
|
446 | files.append(web.tmpl(template, | |
|
447 | node=ctx.hex(), file=f, blockno=blockno + 1, | |
|
448 | parity=next(parity))) | |
|
452 | files.append(web.tmpl.generate(template, { | |
|
453 | 'node': ctx.hex(), | |
|
454 | 'file': f, | |
|
455 | 'blockno': blockno + 1, | |
|
456 | 'parity': next(parity), | |
|
457 | })) | |
|
449 | 458 | |
|
450 | 459 | basectx = basechangectx(web.repo, web.req) |
|
451 | 460 | if basectx is None: |
|
452 | 461 | basectx = ctx.p1() |
|
453 | 462 | |
|
454 | 463 | style = web.config('web', 'style') |
|
455 | 464 | if 'style' in web.req.qsparams: |
|
456 | 465 | style = web.req.qsparams['style'] |
|
457 | 466 | |
|
458 | 467 | diff = diffs(web, ctx, basectx, None, style) |
|
459 | 468 | |
|
460 | 469 | parity = paritygen(web.stripecount) |
|
461 | 470 | diffstatsgen = diffstatgen(ctx, basectx) |
|
462 | 471 | diffstats = diffstat(web.tmpl, ctx, diffstatsgen, parity) |
|
463 | 472 | |
|
464 | 473 | return dict( |
|
465 | 474 | diff=diff, |
|
466 | 475 | symrev=symrevorshortnode(web.req, ctx), |
|
467 | 476 | basenode=basectx.hex(), |
|
468 | 477 | changesettag=showtags, |
|
469 | 478 | changesetbookmark=showbookmarks, |
|
470 | 479 | changesetbranch=showbranch, |
|
471 | 480 | files=files, |
|
472 | 481 | diffsummary=lambda **x: diffsummary(diffstatsgen), |
|
473 | 482 | diffstat=diffstats, |
|
474 | 483 | archives=web.archivelist(ctx.hex()), |
|
475 | 484 | **pycompat.strkwargs(commonentry(web.repo, ctx))) |
|
476 | 485 | |
|
477 | 486 | def listfilediffs(tmpl, files, node, max): |
|
478 | 487 | for f in files[:max]: |
|
479 | yield tmpl('filedifflink', node=hex(node), file=f) | 
|
|
488 | yield tmpl.generate('filedifflink', {'node': hex(node), 'file': f}) | |
|
480 | 489 | if len(files) > max: |
|
481 | yield tmpl('fileellipses') | |
|
490 | yield tmpl.generate('fileellipses', {}) | |
|
482 | 491 | |
|
483 | 492 | def diffs(web, ctx, basectx, files, style, linerange=None, |
|
484 | 493 | lineidprefix=''): |
|
485 | 494 | |
|
486 | 495 | def prettyprintlines(lines, blockno): |
|
487 | 496 | for lineno, l in enumerate(lines, 1): |
|
488 | 497 | difflineno = "%d.%d" % (blockno, lineno) |
|
489 | 498 | if l.startswith('+'): |
|
490 | 499 | ltype = "difflineplus" |
|
491 | 500 | elif l.startswith('-'): |
|
492 | 501 | ltype = "difflineminus" |
|
493 | 502 | elif l.startswith('@'): |
|
494 | 503 | ltype = "difflineat" |
|
495 | 504 | else: |
|
496 | 505 | ltype = "diffline" |
|
497 | yield web.tmpl( | 

498 | ltype, | 

499 | line=l, | 

500 | lineno=lineno, | 

501 | lineid=lineidprefix + "l%s" % difflineno, | 

502 | linenumber="% 8s" % difflineno) | 
|
506 | yield web.tmpl.generate(ltype, { | |
|
507 | 'line': l, | |
|
508 | 'lineno': lineno, | |
|
509 | 'lineid': lineidprefix + "l%s" % difflineno, | |
|
510 | 'linenumber': "% 8s" % difflineno, | |
|
511 | }) | |
|
503 | 512 | |
|
504 | 513 | repo = web.repo |
|
505 | 514 | if files: |
|
506 | 515 | m = match.exact(repo.root, repo.getcwd(), files) |
|
507 | 516 | else: |
|
508 | 517 | m = match.always(repo.root, repo.getcwd()) |
|
509 | 518 | |
|
510 | 519 | diffopts = patch.diffopts(repo.ui, untrusted=True) |
|
511 | 520 | node1 = basectx.node() |
|
512 | 521 | node2 = ctx.node() |
|
513 | 522 | parity = paritygen(web.stripecount) |
|
514 | 523 | |
|
515 | 524 | diffhunks = patch.diffhunks(repo, node1, node2, m, opts=diffopts) |
|
516 | 525 | for blockno, (fctx1, fctx2, header, hunks) in enumerate(diffhunks, 1): |
|
517 | 526 | if style != 'raw': |
|
518 | 527 | header = header[1:] |
|
519 | 528 | lines = [h + '\n' for h in header] |
|
520 | 529 | for hunkrange, hunklines in hunks: |
|
521 | 530 | if linerange is not None and hunkrange is not None: |
|
522 | 531 | s1, l1, s2, l2 = hunkrange |
|
523 | 532 | if not mdiff.hunkinrange((s2, l2), linerange): |
|
524 | 533 | continue |
|
525 | 534 | lines.extend(hunklines) |
|
526 | 535 | if lines: |
|
527 | yield web.tmpl('diffblock', parity=next(parity), blockno=blockno, | 
|
|
528 | lines=prettyprintlines(lines, blockno)) | |
|
536 | yield web.tmpl.generate('diffblock', { | |
|
537 | 'parity': next(parity), | |
|
538 | 'blockno': blockno, | |
|
539 | 'lines': prettyprintlines(lines, blockno), | |
|
540 | }) | |
|
529 | 541 | |
|
530 | 542 | def compare(tmpl, context, leftlines, rightlines): |
|
531 | 543 | '''Generator function that provides side-by-side comparison data.''' |
|
532 | 544 | |
|
533 | 545 | def compline(type, leftlineno, leftline, rightlineno, rightline): |
|
534 | 546 | lineid = leftlineno and ("l%d" % leftlineno) or '' |
|
535 | 547 | lineid += rightlineno and ("r%d" % rightlineno) or '' |
|
536 | 548 | llno = '%d' % leftlineno if leftlineno else '' |
|
537 | 549 | rlno = '%d' % rightlineno if rightlineno else '' |
|
538 | return tmpl('comparisonline', | 

539 | type=type, | 

540 | lineid=lineid, | 

541 | leftlineno=leftlineno, | 

542 | leftlinenumber="% 6s" % llno, | 

543 | leftline=leftline or '', | 

544 | rightlineno=rightlineno, | 

545 | rightlinenumber="% 6s" % rlno, | 

546 | rightline=rightline or '') | 
|
|
|
550 | return tmpl.generate('comparisonline', { | |
|
551 | 'type': type, | |
|
552 | 'lineid': lineid, | |
|
553 | 'leftlineno': leftlineno, | |
|
554 | 'leftlinenumber': "% 6s" % llno, | |
|
555 | 'leftline': leftline or '', | |
|
556 | 'rightlineno': rightlineno, | |
|
557 | 'rightlinenumber': "% 6s" % rlno, | |
|
558 | 'rightline': rightline or '', | |
|
559 | }) | |
|
547 | 560 | |
|
548 | 561 | def getblock(opcodes): |
|
549 | 562 | for type, llo, lhi, rlo, rhi in opcodes: |
|
550 | 563 | len1 = lhi - llo |
|
551 | 564 | len2 = rhi - rlo |
|
552 | 565 | count = min(len1, len2) |
|
553 | 566 | for i in xrange(count): |
|
554 | 567 | yield compline(type=type, |
|
555 | 568 | leftlineno=llo + i + 1, |
|
556 | 569 | leftline=leftlines[llo + i], |
|
557 | 570 | rightlineno=rlo + i + 1, |
|
558 | 571 | rightline=rightlines[rlo + i]) |
|
559 | 572 | if len1 > len2: |
|
560 | 573 | for i in xrange(llo + count, lhi): |
|
561 | 574 | yield compline(type=type, |
|
562 | 575 | leftlineno=i + 1, |
|
563 | 576 | leftline=leftlines[i], |
|
564 | 577 | rightlineno=None, |
|
565 | 578 | rightline=None) |
|
566 | 579 | elif len2 > len1: |
|
567 | 580 | for i in xrange(rlo + count, rhi): |
|
568 | 581 | yield compline(type=type, |
|
569 | 582 | leftlineno=None, |
|
570 | 583 | leftline=None, |
|
571 | 584 | rightlineno=i + 1, |
|
572 | 585 | rightline=rightlines[i]) |
|
573 | 586 | |
|
574 | 587 | s = difflib.SequenceMatcher(None, leftlines, rightlines) |
|
575 | 588 | if context < 0: |
|
576 | yield tmpl('comparisonblock', lines=getblock(s.get_opcodes())) | 
|
|
589 | yield tmpl.generate('comparisonblock', | |
|
590 | {'lines': getblock(s.get_opcodes())}) | |
|
577 | 591 | else: |
|
578 | 592 | for oc in s.get_grouped_opcodes(n=context): |
|
579 | yield tmpl('comparisonblock', lines=getblock(oc)) | 
|
|
593 | yield tmpl.generate('comparisonblock', {'lines': getblock(oc)}) | |
|
580 | 594 | |
|
581 | 595 | def diffstatgen(ctx, basectx): |
|
582 | 596 | '''Generator function that provides the diffstat data.''' |
|
583 | 597 | |
|
584 | 598 | stats = patch.diffstatdata( |
|
585 | 599 | util.iterlines(ctx.diff(basectx, noprefix=False))) |
|
586 | 600 | maxname, maxtotal, addtotal, removetotal, binary = patch.diffstatsum(stats) |
|
587 | 601 | while True: |
|
588 | 602 | yield stats, maxname, maxtotal, addtotal, removetotal, binary |
|
589 | 603 | |
|
590 | 604 | def diffsummary(statgen): |
|
591 | 605 | '''Return a short summary of the diff.''' |
|
592 | 606 | |
|
593 | 607 | stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen) |
|
594 | 608 | return _(' %d files changed, %d insertions(+), %d deletions(-)\n') % ( |
|
595 | 609 | len(stats), addtotal, removetotal) |
|
596 | 610 | |
|
597 | 611 | def diffstat(tmpl, ctx, statgen, parity): |
|
598 | 612 | '''Return a diffstat template for each file in the diff.''' |
|
599 | 613 | |
|
600 | 614 | stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen) |
|
601 | 615 | files = ctx.files() |
|
602 | 616 | |
|
603 | 617 | def pct(i): |
|
604 | 618 | if maxtotal == 0: |
|
605 | 619 | return 0 |
|
606 | 620 | return (float(i) / maxtotal) * 100 |
|
607 | 621 | |
|
608 | 622 | fileno = 0 |
|
609 | 623 | for filename, adds, removes, isbinary in stats: |
|
610 | 624 | template = 'diffstatlink' if filename in files else 'diffstatnolink' |
|
611 | 625 | total = adds + removes |
|
612 | 626 | fileno += 1 |
|
613 | yield tmpl(template, node=ctx.hex(), file=filename, fileno=fileno, | |
|
614 | total=total, addpct=pct(adds), removepct=pct(removes), | |
|
615 | parity=next(parity)) | |
|
627 | yield tmpl.generate(template, { | |
|
628 | 'node': ctx.hex(), | |
|
629 | 'file': filename, | |
|
630 | 'fileno': fileno, | |
|
631 | 'total': total, | |
|
632 | 'addpct': pct(adds), | |
|
633 | 'removepct': pct(removes), | |
|
634 | 'parity': next(parity), | |
|
635 | }) | |
|
616 | 636 | |
|
617 | 637 | class sessionvars(object): |
|
618 | 638 | def __init__(self, vars, start='?'): |
|
619 | 639 | self.start = start |
|
620 | 640 | self.vars = vars |
|
621 | 641 | def __getitem__(self, key): |
|
622 | 642 | return self.vars[key] |
|
623 | 643 | def __setitem__(self, key, value): |
|
624 | 644 | self.vars[key] = value |
|
625 | 645 | def __copy__(self): |
|
626 | 646 | return sessionvars(copy.copy(self.vars), self.start) |
|
627 | 647 | def __iter__(self): |
|
628 | 648 | separator = self.start |
|
629 | 649 | for key, value in sorted(self.vars.iteritems()): |
|
630 | 650 | yield {'name': key, |
|
631 | 651 | 'value': pycompat.bytestr(value), |
|
632 | 652 | 'separator': separator, |
|
633 | 653 | } |
|
634 | 654 | separator = '&' |
|
635 | 655 | |
|
636 | 656 | class wsgiui(uimod.ui): |
|
637 | 657 | # default termwidth breaks under mod_wsgi |
|
638 | 658 | def termwidth(self): |
|
639 | 659 | return 80 |
|
640 | 660 | |
|
641 | 661 | def getwebsubs(repo): |
|
642 | 662 | websubtable = [] |
|
643 | 663 | websubdefs = repo.ui.configitems('websub') |
|
644 | 664 | # we must maintain interhg backwards compatibility |
|
645 | 665 | websubdefs += repo.ui.configitems('interhg') |
|
646 | 666 | for key, pattern in websubdefs: |
|
647 | 667 | # grab the delimiter from the character after the "s" |
|
648 | 668 | unesc = pattern[1:2] |
|
649 | 669 | delim = re.escape(unesc) |
|
650 | 670 | |
|
651 | 671 | # identify portions of the pattern, taking care to avoid escaped |
|
652 | 672 | # delimiters. the replace format and flags are optional, but |
|
653 | 673 | # delimiters are required. |
|
654 | 674 | match = re.match( |
|
655 | 675 | br'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$' |
|
656 | 676 | % (delim, delim, delim), pattern) |
|
657 | 677 | if not match: |
|
658 | 678 | repo.ui.warn(_("websub: invalid pattern for %s: %s\n") |
|
659 | 679 | % (key, pattern)) |
|
660 | 680 | continue |
|
661 | 681 | |
|
662 | 682 | # we need to unescape the delimiter for regexp and format |
|
663 | 683 | delim_re = re.compile(br'(?<!\\)\\%s' % delim) |
|
664 | 684 | regexp = delim_re.sub(unesc, match.group(1)) |
|
665 | 685 | format = delim_re.sub(unesc, match.group(2)) |
|
666 | 686 | |
|
667 | 687 | # the pattern allows for 6 regexp flags, so set them if necessary |
|
668 | 688 | flagin = match.group(3) |
|
669 | 689 | flags = 0 |
|
670 | 690 | if flagin: |
|
671 | 691 | for flag in flagin.upper(): |
|
672 | 692 | flags |= re.__dict__[flag] |
|
673 | 693 | |
|
674 | 694 | try: |
|
675 | 695 | regexp = re.compile(regexp, flags) |
|
676 | 696 | websubtable.append((regexp, format)) |
|
677 | 697 | except re.error: |
|
678 | 698 | repo.ui.warn(_("websub: invalid regexp for %s: %s\n") |
|
679 | 699 | % (key, regexp)) |
|
680 | 700 | return websubtable |
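
The hgweb/webutil hunk above makes one mechanical change throughout: templater calls that passed keyword arguments, such as tmpl('filedifflink', node=..., file=...), become tmpl.generate('filedifflink', {...}) taking an explicit mapping dict. The sketch below is a minimal, self-contained illustration of the two calling conventions; the toytemplater class and its format string are invented stand-ins for illustration only, not Mercurial's real templater, which resolves named templates from map files and yields its output lazily.

    # Invented stand-in used only to illustrate the calling-convention change;
    # it is not Mercurial's templater.
    class toytemplater(object):
        def __init__(self, templates):
            self._templates = templates  # template name -> format string

        def __call__(self, name, **kwargs):
            # old convention: keyword arguments form the mapping
            yield self._templates[name] % kwargs

        def generate(self, name, mapping):
            # new convention: the mapping is passed as an explicit dict
            yield self._templates[name] % mapping

    tmpl = toytemplater({'filedifflink': '<a href="#%(node)s">%(file)s</a>'})
    old = ''.join(tmpl('filedifflink', node='abc123', file='README'))
    new = ''.join(tmpl.generate('filedifflink', {'node': 'abc123', 'file': 'README'}))
    assert old == new == '<a href="#abc123">README</a>'

Both forms yield the same generator-of-strings shape, which is why the patch can swap one for the other without touching the code that later stringifies the result.
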
@@ -1,802 +1,802 b'' | |||
|
1 | 1 | # templatekw.py - common changeset template keywords |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | from .i18n import _ |
|
11 | 11 | from .node import ( |
|
12 | 12 | hex, |
|
13 | 13 | nullid, |
|
14 | 14 | ) |
|
15 | 15 | |
|
16 | 16 | from . import ( |
|
17 | 17 | encoding, |
|
18 | 18 | error, |
|
19 | 19 | hbisect, |
|
20 | 20 | i18n, |
|
21 | 21 | obsutil, |
|
22 | 22 | patch, |
|
23 | 23 | pycompat, |
|
24 | 24 | registrar, |
|
25 | 25 | scmutil, |
|
26 | 26 | templateutil, |
|
27 | 27 | util, |
|
28 | 28 | ) |
|
29 | 29 | |
|
30 | 30 | _hybrid = templateutil.hybrid |
|
31 | 31 | _mappable = templateutil.mappable |
|
32 | 32 | _showlist = templateutil._showlist |
|
33 | 33 | hybriddict = templateutil.hybriddict |
|
34 | 34 | hybridlist = templateutil.hybridlist |
|
35 | 35 | compatdict = templateutil.compatdict |
|
36 | 36 | compatlist = templateutil.compatlist |
|
37 | 37 | |
|
38 | 38 | def showdict(name, data, mapping, plural=None, key='key', value='value', |
|
39 | 39 | fmt=None, separator=' '): |
|
40 | 40 | ui = mapping.get('ui') |
|
41 | 41 | if ui: |
|
42 | 42 | ui.deprecwarn("templatekw.showdict() is deprecated, use " |
|
43 | 43 | "templateutil.compatdict()", '4.6') |
|
44 | 44 | c = [{key: k, value: v} for k, v in data.iteritems()] |
|
45 | 45 | f = _showlist(name, c, mapping['templ'], mapping, plural, separator) |
|
46 | 46 | return hybriddict(data, key=key, value=value, fmt=fmt, gen=f) |
|
47 | 47 | |
|
48 | 48 | def showlist(name, values, mapping, plural=None, element=None, separator=' '): |
|
49 | 49 | ui = mapping.get('ui') |
|
50 | 50 | if ui: |
|
51 | 51 | ui.deprecwarn("templatekw.showlist() is deprecated, use " |
|
52 | 52 | "templateutil.compatlist()", '4.6') |
|
53 | 53 | if not element: |
|
54 | 54 | element = name |
|
55 | 55 | f = _showlist(name, values, mapping['templ'], mapping, plural, separator) |
|
56 | 56 | return hybridlist(values, name=element, gen=f) |
|
57 | 57 | |
|
58 | 58 | def getlatesttags(context, mapping, pattern=None): |
|
59 | 59 | '''return date, distance and name for the latest tag of rev''' |
|
60 | 60 | repo = context.resource(mapping, 'repo') |
|
61 | 61 | ctx = context.resource(mapping, 'ctx') |
|
62 | 62 | cache = context.resource(mapping, 'cache') |
|
63 | 63 | |
|
64 | 64 | cachename = 'latesttags' |
|
65 | 65 | if pattern is not None: |
|
66 | 66 | cachename += '-' + pattern |
|
67 | 67 | match = util.stringmatcher(pattern)[2] |
|
68 | 68 | else: |
|
69 | 69 | match = util.always |
|
70 | 70 | |
|
71 | 71 | if cachename not in cache: |
|
72 | 72 | # Cache mapping from rev to a tuple with tag date, tag |
|
73 | 73 | # distance and tag name |
|
74 | 74 | cache[cachename] = {-1: (0, 0, ['null'])} |
|
75 | 75 | latesttags = cache[cachename] |
|
76 | 76 | |
|
77 | 77 | rev = ctx.rev() |
|
78 | 78 | todo = [rev] |
|
79 | 79 | while todo: |
|
80 | 80 | rev = todo.pop() |
|
81 | 81 | if rev in latesttags: |
|
82 | 82 | continue |
|
83 | 83 | ctx = repo[rev] |
|
84 | 84 | tags = [t for t in ctx.tags() |
|
85 | 85 | if (repo.tagtype(t) and repo.tagtype(t) != 'local' |
|
86 | 86 | and match(t))] |
|
87 | 87 | if tags: |
|
88 | 88 | latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)] |
|
89 | 89 | continue |
|
90 | 90 | try: |
|
91 | 91 | ptags = [latesttags[p.rev()] for p in ctx.parents()] |
|
92 | 92 | if len(ptags) > 1: |
|
93 | 93 | if ptags[0][2] == ptags[1][2]: |
|
94 | 94 | # The tuples are laid out so the right one can be found by |
|
95 | 95 | # comparison in this case. |
|
96 | 96 | pdate, pdist, ptag = max(ptags) |
|
97 | 97 | else: |
|
98 | 98 | def key(x): |
|
99 | 99 | changessincetag = len(repo.revs('only(%d, %s)', |
|
100 | 100 | ctx.rev(), x[2][0])) |
|
101 | 101 | # Smallest number of changes since tag wins. Date is |
|
102 | 102 | # used as tiebreaker. |
|
103 | 103 | return [-changessincetag, x[0]] |
|
104 | 104 | pdate, pdist, ptag = max(ptags, key=key) |
|
105 | 105 | else: |
|
106 | 106 | pdate, pdist, ptag = ptags[0] |
|
107 | 107 | except KeyError: |
|
108 | 108 | # Cache miss - recurse |
|
109 | 109 | todo.append(rev) |
|
110 | 110 | todo.extend(p.rev() for p in ctx.parents()) |
|
111 | 111 | continue |
|
112 | 112 | latesttags[rev] = pdate, pdist + 1, ptag |
|
113 | 113 | return latesttags[rev] |
|
114 | 114 | |
|
115 | 115 | def getrenamedfn(repo, endrev=None): |
|
116 | 116 | rcache = {} |
|
117 | 117 | if endrev is None: |
|
118 | 118 | endrev = len(repo) |
|
119 | 119 | |
|
120 | 120 | def getrenamed(fn, rev): |
|
121 | 121 | '''looks up all renames for a file (up to endrev) the first |
|
122 | 122 | time the file is given. It indexes on the changerev and only |
|
123 | 123 | parses the manifest if linkrev != changerev. |
|
124 | 124 | Returns rename info for fn at changerev rev.''' |
|
125 | 125 | if fn not in rcache: |
|
126 | 126 | rcache[fn] = {} |
|
127 | 127 | fl = repo.file(fn) |
|
128 | 128 | for i in fl: |
|
129 | 129 | lr = fl.linkrev(i) |
|
130 | 130 | renamed = fl.renamed(fl.node(i)) |
|
131 | 131 | rcache[fn][lr] = renamed |
|
132 | 132 | if lr >= endrev: |
|
133 | 133 | break |
|
134 | 134 | if rev in rcache[fn]: |
|
135 | 135 | return rcache[fn][rev] |
|
136 | 136 | |
|
137 | 137 | # If linkrev != rev (i.e. rev not found in rcache) fallback to |
|
138 | 138 | # filectx logic. |
|
139 | 139 | try: |
|
140 | 140 | return repo[rev][fn].renamed() |
|
141 | 141 | except error.LookupError: |
|
142 | 142 | return None |
|
143 | 143 | |
|
144 | 144 | return getrenamed |
|
145 | 145 | |
|
146 | 146 | def getlogcolumns(): |
|
147 | 147 | """Return a dict of log column labels""" |
|
148 | 148 | _ = pycompat.identity # temporarily disable gettext |
|
149 | 149 | # i18n: column positioning for "hg log" |
|
150 | 150 | columns = _('bookmark: %s\n' |
|
151 | 151 | 'branch: %s\n' |
|
152 | 152 | 'changeset: %s\n' |
|
153 | 153 | 'copies: %s\n' |
|
154 | 154 | 'date: %s\n' |
|
155 | 155 | 'extra: %s=%s\n' |
|
156 | 156 | 'files+: %s\n' |
|
157 | 157 | 'files-: %s\n' |
|
158 | 158 | 'files: %s\n' |
|
159 | 159 | 'instability: %s\n' |
|
160 | 160 | 'manifest: %s\n' |
|
161 | 161 | 'obsolete: %s\n' |
|
162 | 162 | 'parent: %s\n' |
|
163 | 163 | 'phase: %s\n' |
|
164 | 164 | 'summary: %s\n' |
|
165 | 165 | 'tag: %s\n' |
|
166 | 166 | 'user: %s\n') |
|
167 | 167 | return dict(zip([s.split(':', 1)[0] for s in columns.splitlines()], |
|
168 | 168 | i18n._(columns).splitlines(True))) |
|
169 | 169 | |
|
170 | 170 | # default templates internally used for rendering of lists |
|
171 | 171 | defaulttempl = { |
|
172 | 172 | 'parent': '{rev}:{node|formatnode} ', |
|
173 | 173 | 'manifest': '{rev}:{node|formatnode}', |
|
174 | 174 | 'file_copy': '{name} ({source})', |
|
175 | 175 | 'envvar': '{key}={value}', |
|
176 | 176 | 'extra': '{key}={value|stringescape}' |
|
177 | 177 | } |
|
178 | 178 | # filecopy is preserved for compatibility reasons |
|
179 | 179 | defaulttempl['filecopy'] = defaulttempl['file_copy'] |
|
180 | 180 | |
|
181 | 181 | # keywords are callables (see registrar.templatekeyword for details) |
|
182 | 182 | keywords = {} |
|
183 | 183 | templatekeyword = registrar.templatekeyword(keywords) |
|
184 | 184 | |
|
185 | 185 | @templatekeyword('author', requires={'ctx'}) |
|
186 | 186 | def showauthor(context, mapping): |
|
187 | 187 | """String. The unmodified author of the changeset.""" |
|
188 | 188 | ctx = context.resource(mapping, 'ctx') |
|
189 | 189 | return ctx.user() |
|
190 | 190 | |
|
191 | 191 | @templatekeyword('bisect', requires={'repo', 'ctx'}) |
|
192 | 192 | def showbisect(context, mapping): |
|
193 | 193 | """String. The changeset bisection status.""" |
|
194 | 194 | repo = context.resource(mapping, 'repo') |
|
195 | 195 | ctx = context.resource(mapping, 'ctx') |
|
196 | 196 | return hbisect.label(repo, ctx.node()) |
|
197 | 197 | |
|
198 | 198 | @templatekeyword('branch', requires={'ctx'}) |
|
199 | 199 | def showbranch(context, mapping): |
|
200 | 200 | """String. The name of the branch on which the changeset was |
|
201 | 201 | committed. |
|
202 | 202 | """ |
|
203 | 203 | ctx = context.resource(mapping, 'ctx') |
|
204 | 204 | return ctx.branch() |
|
205 | 205 | |
|
206 | 206 | @templatekeyword('branches', requires={'ctx', 'templ'}) |
|
207 | 207 | def showbranches(context, mapping): |
|
208 | 208 | """List of strings. The name of the branch on which the |
|
209 | 209 | changeset was committed. Will be empty if the branch name was |
|
210 | 210 | default. (DEPRECATED) |
|
211 | 211 | """ |
|
212 | 212 | ctx = context.resource(mapping, 'ctx') |
|
213 | 213 | branch = ctx.branch() |
|
214 | 214 | if branch != 'default': |
|
215 | 215 | return compatlist(context, mapping, 'branch', [branch], |
|
216 | 216 | plural='branches') |
|
217 | 217 | return compatlist(context, mapping, 'branch', [], plural='branches') |
|
218 | 218 | |
|
219 | 219 | @templatekeyword('bookmarks', requires={'repo', 'ctx', 'templ'}) |
|
220 | 220 | def showbookmarks(context, mapping): |
|
221 | 221 | """List of strings. Any bookmarks associated with the |
|
222 | 222 | changeset. Also sets 'active', the name of the active bookmark. |
|
223 | 223 | """ |
|
224 | 224 | repo = context.resource(mapping, 'repo') |
|
225 | 225 | ctx = context.resource(mapping, 'ctx') |
|
226 | 226 | templ = context.resource(mapping, 'templ') |
|
227 | 227 | bookmarks = ctx.bookmarks() |
|
228 | 228 | active = repo._activebookmark |
|
229 | 229 | makemap = lambda v: {'bookmark': v, 'active': active, 'current': active} |
|
230 | 230 | f = _showlist('bookmark', bookmarks, templ, mapping) |
|
231 | 231 | return _hybrid(f, bookmarks, makemap, pycompat.identity) |
|
232 | 232 | |
|
233 | 233 | @templatekeyword('children', requires={'ctx', 'templ'}) |
|
234 | 234 | def showchildren(context, mapping): |
|
235 | 235 | """List of strings. The children of the changeset.""" |
|
236 | 236 | ctx = context.resource(mapping, 'ctx') |
|
237 | 237 | childrevs = ['%d:%s' % (cctx.rev(), cctx) for cctx in ctx.children()] |
|
238 | 238 | return compatlist(context, mapping, 'children', childrevs, element='child') |
|
239 | 239 | |
|
240 | 240 | # Deprecated, but kept alive for help generation purposes. |
|
241 | 241 | @templatekeyword('currentbookmark', requires={'repo', 'ctx'}) |
|
242 | 242 | def showcurrentbookmark(context, mapping): |
|
243 | 243 | """String. The active bookmark, if it is associated with the changeset. |
|
244 | 244 | (DEPRECATED)""" |
|
245 | 245 | return showactivebookmark(context, mapping) |
|
246 | 246 | |
|
247 | 247 | @templatekeyword('activebookmark', requires={'repo', 'ctx'}) |
|
248 | 248 | def showactivebookmark(context, mapping): |
|
249 | 249 | """String. The active bookmark, if it is associated with the changeset.""" |
|
250 | 250 | repo = context.resource(mapping, 'repo') |
|
251 | 251 | ctx = context.resource(mapping, 'ctx') |
|
252 | 252 | active = repo._activebookmark |
|
253 | 253 | if active and active in ctx.bookmarks(): |
|
254 | 254 | return active |
|
255 | 255 | return '' |
|
256 | 256 | |
|
257 | 257 | @templatekeyword('date', requires={'ctx'}) |
|
258 | 258 | def showdate(context, mapping): |
|
259 | 259 | """Date information. The date when the changeset was committed.""" |
|
260 | 260 | ctx = context.resource(mapping, 'ctx') |
|
261 | 261 | return ctx.date() |
|
262 | 262 | |
|
263 | 263 | @templatekeyword('desc', requires={'ctx'}) |
|
264 | 264 | def showdescription(context, mapping): |
|
265 | 265 | """String. The text of the changeset description.""" |
|
266 | 266 | ctx = context.resource(mapping, 'ctx') |
|
267 | 267 | s = ctx.description() |
|
268 | 268 | if isinstance(s, encoding.localstr): |
|
269 | 269 | # try hard to preserve utf-8 bytes |
|
270 | 270 | return encoding.tolocal(encoding.fromlocal(s).strip()) |
|
271 | 271 | else: |
|
272 | 272 | return s.strip() |
|
273 | 273 | |
|
274 | 274 | @templatekeyword('diffstat', requires={'ctx'}) |
|
275 | 275 | def showdiffstat(context, mapping): |
|
276 | 276 | """String. Statistics of changes with the following format: |
|
277 | 277 | "modified files: +added/-removed lines" |
|
278 | 278 | """ |
|
279 | 279 | ctx = context.resource(mapping, 'ctx') |
|
280 | 280 | stats = patch.diffstatdata(util.iterlines(ctx.diff(noprefix=False))) |
|
281 | 281 | maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats) |
|
282 | 282 | return '%d: +%d/-%d' % (len(stats), adds, removes) |
|
283 | 283 | |
|
284 | 284 | @templatekeyword('envvars', requires={'ui', 'templ'}) |
|
285 | 285 | def showenvvars(context, mapping): |
|
286 | 286 | """A dictionary of environment variables. (EXPERIMENTAL)""" |
|
287 | 287 | ui = context.resource(mapping, 'ui') |
|
288 | 288 | env = ui.exportableenviron() |
|
289 | 289 | env = util.sortdict((k, env[k]) for k in sorted(env)) |
|
290 | 290 | return compatdict(context, mapping, 'envvar', env, plural='envvars') |
|
291 | 291 | |
|
292 | 292 | @templatekeyword('extras', requires={'ctx', 'templ'}) |
|
293 | 293 | def showextras(context, mapping): |
|
294 | 294 | """List of dicts with key, value entries of the 'extras' |
|
295 | 295 | field of this changeset.""" |
|
296 | 296 | ctx = context.resource(mapping, 'ctx') |
|
297 | 297 | templ = context.resource(mapping, 'templ') |
|
298 | 298 | extras = ctx.extra() |
|
299 | 299 | extras = util.sortdict((k, extras[k]) for k in sorted(extras)) |
|
300 | 300 | makemap = lambda k: {'key': k, 'value': extras[k]} |
|
301 | 301 | c = [makemap(k) for k in extras] |
|
302 | 302 | f = _showlist('extra', c, templ, mapping, plural='extras') |
|
303 | 303 | return _hybrid(f, extras, makemap, |
|
304 | 304 | lambda k: '%s=%s' % (k, util.escapestr(extras[k]))) |
|
305 | 305 | |
|
306 | 306 | def _showfilesbystat(context, mapping, name, index): |
|
307 | 307 | repo = context.resource(mapping, 'repo') |
|
308 | 308 | ctx = context.resource(mapping, 'ctx') |
|
309 | 309 | revcache = context.resource(mapping, 'revcache') |
|
310 | 310 | if 'files' not in revcache: |
|
311 | 311 | revcache['files'] = repo.status(ctx.p1(), ctx)[:3] |
|
312 | 312 | files = revcache['files'][index] |
|
313 | 313 | return compatlist(context, mapping, name, files, element='file') |
|
314 | 314 | |
|
315 | 315 | @templatekeyword('file_adds', requires={'repo', 'ctx', 'revcache', 'templ'}) |
|
316 | 316 | def showfileadds(context, mapping): |
|
317 | 317 | """List of strings. Files added by this changeset.""" |
|
318 | 318 | return _showfilesbystat(context, mapping, 'file_add', 1) |
|
319 | 319 | |
|
320 | 320 | @templatekeyword('file_copies', |
|
321 | 321 | requires={'repo', 'ctx', 'cache', 'revcache', 'templ'}) |
|
322 | 322 | def showfilecopies(context, mapping): |
|
323 | 323 | """List of strings. Files copied in this changeset with |
|
324 | 324 | their sources. |
|
325 | 325 | """ |
|
326 | 326 | repo = context.resource(mapping, 'repo') |
|
327 | 327 | ctx = context.resource(mapping, 'ctx') |
|
328 | 328 | cache = context.resource(mapping, 'cache') |
|
329 | 329 | copies = context.resource(mapping, 'revcache').get('copies') |
|
330 | 330 | if copies is None: |
|
331 | 331 | if 'getrenamed' not in cache: |
|
332 | 332 | cache['getrenamed'] = getrenamedfn(repo) |
|
333 | 333 | copies = [] |
|
334 | 334 | getrenamed = cache['getrenamed'] |
|
335 | 335 | for fn in ctx.files(): |
|
336 | 336 | rename = getrenamed(fn, ctx.rev()) |
|
337 | 337 | if rename: |
|
338 | 338 | copies.append((fn, rename[0])) |
|
339 | 339 | |
|
340 | 340 | copies = util.sortdict(copies) |
|
341 | 341 | return compatdict(context, mapping, 'file_copy', copies, |
|
342 | 342 | key='name', value='source', fmt='%s (%s)', |
|
343 | 343 | plural='file_copies') |
|
344 | 344 | |
|
345 | 345 | # showfilecopiesswitch() displays file copies only if copy records are |
|
346 | 346 | # provided before calling the templater, usually with a --copies |
|
347 | 347 | # command line switch. |
|
348 | 348 | @templatekeyword('file_copies_switch', requires={'revcache', 'templ'}) |
|
349 | 349 | def showfilecopiesswitch(context, mapping): |
|
350 | 350 | """List of strings. Like "file_copies" but displayed |
|
351 | 351 | only if the --copied switch is set. |
|
352 | 352 | """ |
|
353 | 353 | copies = context.resource(mapping, 'revcache').get('copies') or [] |
|
354 | 354 | copies = util.sortdict(copies) |
|
355 | 355 | return compatdict(context, mapping, 'file_copy', copies, |
|
356 | 356 | key='name', value='source', fmt='%s (%s)', |
|
357 | 357 | plural='file_copies') |
|
358 | 358 | |
|
359 | 359 | @templatekeyword('file_dels', requires={'repo', 'ctx', 'revcache', 'templ'}) |
|
360 | 360 | def showfiledels(context, mapping): |
|
361 | 361 | """List of strings. Files removed by this changeset.""" |
|
362 | 362 | return _showfilesbystat(context, mapping, 'file_del', 2) |
|
363 | 363 | |
|
364 | 364 | @templatekeyword('file_mods', requires={'repo', 'ctx', 'revcache', 'templ'}) |
|
365 | 365 | def showfilemods(context, mapping): |
|
366 | 366 | """List of strings. Files modified by this changeset.""" |
|
367 | 367 | return _showfilesbystat(context, mapping, 'file_mod', 0) |
|
368 | 368 | |
|
369 | 369 | @templatekeyword('files', requires={'ctx', 'templ'}) |
|
370 | 370 | def showfiles(context, mapping): |
|
371 | 371 | """List of strings. All files modified, added, or removed by this |
|
372 | 372 | changeset. |
|
373 | 373 | """ |
|
374 | 374 | ctx = context.resource(mapping, 'ctx') |
|
375 | 375 | return compatlist(context, mapping, 'file', ctx.files()) |
|
376 | 376 | |
|
377 | 377 | @templatekeyword('graphnode', requires={'repo', 'ctx'}) |
|
378 | 378 | def showgraphnode(context, mapping): |
|
379 | 379 | """String. The character representing the changeset node in an ASCII |
|
380 | 380 | revision graph.""" |
|
381 | 381 | repo = context.resource(mapping, 'repo') |
|
382 | 382 | ctx = context.resource(mapping, 'ctx') |
|
383 | 383 | return getgraphnode(repo, ctx) |
|
384 | 384 | |
|
385 | 385 | def getgraphnode(repo, ctx): |
|
386 | 386 | wpnodes = repo.dirstate.parents() |
|
387 | 387 | if wpnodes[1] == nullid: |
|
388 | 388 | wpnodes = wpnodes[:1] |
|
389 | 389 | if ctx.node() in wpnodes: |
|
390 | 390 | return '@' |
|
391 | 391 | elif ctx.obsolete(): |
|
392 | 392 | return 'x' |
|
393 | 393 | elif ctx.isunstable(): |
|
394 | 394 | return '*' |
|
395 | 395 | elif ctx.closesbranch(): |
|
396 | 396 | return '_' |
|
397 | 397 | else: |
|
398 | 398 | return 'o' |
|
399 | 399 | |
|
400 | 400 | @templatekeyword('graphwidth', requires=()) |
|
401 | 401 | def showgraphwidth(context, mapping): |
|
402 | 402 | """Integer. The width of the graph drawn by 'log --graph' or zero.""" |
|
403 | 403 | # just hosts documentation; should be overridden by template mapping |
|
404 | 404 | return 0 |
|
405 | 405 | |
|
406 | 406 | @templatekeyword('index', requires=()) |
|
407 | 407 | def showindex(context, mapping): |
|
408 | 408 | """Integer. The current iteration of the loop. (0 indexed)""" |
|
409 | 409 | # just hosts documentation; should be overridden by template mapping |
|
410 | 410 | raise error.Abort(_("can't use index in this context")) |
|
411 | 411 | |
|
412 | 412 | @templatekeyword('latesttag', requires={'repo', 'ctx', 'cache', 'templ'}) |
|
413 | 413 | def showlatesttag(context, mapping): |
|
414 | 414 | """List of strings. The global tags on the most recent globally |
|
415 | 415 | tagged ancestor of this changeset. If no such tags exist, the list |
|
416 | 416 | consists of the single string "null". |
|
417 | 417 | """ |
|
418 | 418 | return showlatesttags(context, mapping, None) |
|
419 | 419 | |
|
420 | 420 | def showlatesttags(context, mapping, pattern): |
|
421 | 421 | """helper method for the latesttag keyword and function""" |
|
422 | 422 | latesttags = getlatesttags(context, mapping, pattern) |
|
423 | 423 | |
|
424 | 424 | # latesttag[0] is an implementation detail for sorting csets on different |
|
425 | 425 | # branches in a stable manner- it is the date the tagged cset was created, |
|
426 | 426 | # not the date the tag was created. Therefore it isn't made visible here. |
|
427 | 427 | makemap = lambda v: { |
|
428 | 428 | 'changes': _showchangessincetag, |
|
429 | 429 | 'distance': latesttags[1], |
|
430 | 430 | 'latesttag': v, # BC with {latesttag % '{latesttag}'} |
|
431 | 431 | 'tag': v |
|
432 | 432 | } |
|
433 | 433 | |
|
434 | 434 | tags = latesttags[2] |
|
435 | 435 | templ = context.resource(mapping, 'templ') |
|
436 | 436 | f = _showlist('latesttag', tags, templ, mapping, separator=':') |
|
437 | 437 | return _hybrid(f, tags, makemap, pycompat.identity) |
|
438 | 438 | |
|
439 | 439 | @templatekeyword('latesttagdistance', requires={'repo', 'ctx', 'cache'}) |
|
440 | 440 | def showlatesttagdistance(context, mapping): |
|
441 | 441 | """Integer. Longest path to the latest tag.""" |
|
442 | 442 | return getlatesttags(context, mapping)[1] |
|
443 | 443 | |
|
444 | 444 | @templatekeyword('changessincelatesttag', requires={'repo', 'ctx', 'cache'}) |
|
445 | 445 | def showchangessincelatesttag(context, mapping): |
|
446 | 446 | """Integer. All ancestors not in the latest tag.""" |
|
447 | 447 | mapping = mapping.copy() |
|
448 | 448 | mapping['tag'] = getlatesttags(context, mapping)[2][0] |
|
449 | 449 | return _showchangessincetag(context, mapping) |
|
450 | 450 | |
|
451 | 451 | def _showchangessincetag(context, mapping): |
|
452 | 452 | repo = context.resource(mapping, 'repo') |
|
453 | 453 | ctx = context.resource(mapping, 'ctx') |
|
454 | 454 | offset = 0 |
|
455 | 455 | revs = [ctx.rev()] |
|
456 | 456 | tag = context.symbol(mapping, 'tag') |
|
457 | 457 | |
|
458 | 458 | # The only() revset doesn't currently support wdir() |
|
459 | 459 | if ctx.rev() is None: |
|
460 | 460 | offset = 1 |
|
461 | 461 | revs = [p.rev() for p in ctx.parents()] |
|
462 | 462 | |
|
463 | 463 | return len(repo.revs('only(%ld, %s)', revs, tag)) + offset |
|
464 | 464 | |
|
465 | 465 | # teach templater latesttags.changes is switched to (context, mapping) API |
|
466 | 466 | _showchangessincetag._requires = {'repo', 'ctx'} |
|
467 | 467 | |
|
468 | 468 | @templatekeyword('manifest', requires={'repo', 'ctx', 'templ'}) |
|
469 | 469 | def showmanifest(context, mapping): |
|
470 | 470 | repo = context.resource(mapping, 'repo') |
|
471 | 471 | ctx = context.resource(mapping, 'ctx') |
|
472 | 472 | templ = context.resource(mapping, 'templ') |
|
473 | 473 | mnode = ctx.manifestnode() |
|
474 | 474 | if mnode is None: |
|
475 | 475 | # just avoid crash, we might want to use the 'ff...' hash in future |
|
476 | 476 | return |
|
477 | 477 | mrev = repo.manifestlog._revlog.rev(mnode) |
|
478 | 478 | mhex = hex(mnode) |
|
479 | 479 | mapping = mapping.copy() |
|
480 | 480 | mapping.update({'rev': mrev, 'node': mhex}) |
|
481 | f = templ('manifest', **pycompat.strkwargs(mapping)) | 
|
|
481 | f = templ.generate('manifest', mapping) | |
|
482 | 482 | # TODO: perhaps 'ctx' should be dropped from mapping because manifest |
|
483 | 483 | # rev and node are completely different from changeset's. |
|
484 | 484 | return _mappable(f, None, f, lambda x: {'rev': mrev, 'node': mhex}) |
|
485 | 485 | |
|
486 | 486 | @templatekeyword('obsfate', requires={'ui', 'repo', 'ctx', 'templ'}) |
|
487 | 487 | def showobsfate(context, mapping): |
|
488 | 488 | # this function returns a list containing pre-formatted obsfate strings. |
|
489 | 489 | # |
|
490 | 490 | # This function will be replaced by templates fragments when we will have |
|
491 | 491 | # the verbosity templatekw available. |
|
492 | 492 | succsandmarkers = showsuccsandmarkers(context, mapping) |
|
493 | 493 | |
|
494 | 494 | ui = context.resource(mapping, 'ui') |
|
495 | 495 | values = [] |
|
496 | 496 | |
|
497 | 497 | for x in succsandmarkers: |
|
498 | 498 | values.append(obsutil.obsfateprinter(x['successors'], x['markers'], ui)) |
|
499 | 499 | |
|
500 | 500 | return compatlist(context, mapping, "fate", values) |
|
501 | 501 | |
|
502 | 502 | def shownames(context, mapping, namespace): |
|
503 | 503 | """helper method to generate a template keyword for a namespace""" |
|
504 | 504 | repo = context.resource(mapping, 'repo') |
|
505 | 505 | ctx = context.resource(mapping, 'ctx') |
|
506 | 506 | ns = repo.names[namespace] |
|
507 | 507 | names = ns.names(repo, ctx.node()) |
|
508 | 508 | return compatlist(context, mapping, ns.templatename, names, |
|
509 | 509 | plural=namespace) |
|
510 | 510 | |
|
511 | 511 | @templatekeyword('namespaces', requires={'repo', 'ctx', 'templ'}) |
|
512 | 512 | def shownamespaces(context, mapping): |
|
513 | 513 | """Dict of lists. Names attached to this changeset per |
|
514 | 514 | namespace.""" |
|
515 | 515 | repo = context.resource(mapping, 'repo') |
|
516 | 516 | ctx = context.resource(mapping, 'ctx') |
|
517 | 517 | templ = context.resource(mapping, 'templ') |
|
518 | 518 | |
|
519 | 519 | namespaces = util.sortdict() |
|
520 | 520 | def makensmapfn(ns): |
|
521 | 521 | # 'name' for iterating over namespaces, templatename for local reference |
|
522 | 522 | return lambda v: {'name': v, ns.templatename: v} |
|
523 | 523 | |
|
524 | 524 | for k, ns in repo.names.iteritems(): |
|
525 | 525 | names = ns.names(repo, ctx.node()) |
|
526 | 526 | f = _showlist('name', names, templ, mapping) |
|
527 | 527 | namespaces[k] = _hybrid(f, names, makensmapfn(ns), pycompat.identity) |
|
528 | 528 | |
|
529 | 529 | f = _showlist('namespace', list(namespaces), templ, mapping) |
|
530 | 530 | |
|
531 | 531 | def makemap(ns): |
|
532 | 532 | return { |
|
533 | 533 | 'namespace': ns, |
|
534 | 534 | 'names': namespaces[ns], |
|
535 | 535 | 'builtin': repo.names[ns].builtin, |
|
536 | 536 | 'colorname': repo.names[ns].colorname, |
|
537 | 537 | } |
|
538 | 538 | |
|
539 | 539 | return _hybrid(f, namespaces, makemap, pycompat.identity) |
|
540 | 540 | |
|
541 | 541 | @templatekeyword('node', requires={'ctx'}) |
|
542 | 542 | def shownode(context, mapping): |
|
543 | 543 | """String. The changeset identification hash, as a 40 hexadecimal |
|
544 | 544 | digit string. |
|
545 | 545 | """ |
|
546 | 546 | ctx = context.resource(mapping, 'ctx') |
|
547 | 547 | return ctx.hex() |
|
548 | 548 | |
|
549 | 549 | @templatekeyword('obsolete', requires={'ctx'}) |
|
550 | 550 | def showobsolete(context, mapping): |
|
551 | 551 | """String. Whether the changeset is obsolete. (EXPERIMENTAL)""" |
|
552 | 552 | ctx = context.resource(mapping, 'ctx') |
|
553 | 553 | if ctx.obsolete(): |
|
554 | 554 | return 'obsolete' |
|
555 | 555 | return '' |
|
556 | 556 | |
|
557 | 557 | @templatekeyword('peerurls', requires={'repo'}) |
|
558 | 558 | def showpeerurls(context, mapping): |
|
559 | 559 | """A dictionary of repository locations defined in the [paths] section |
|
560 | 560 | of your configuration file.""" |
|
561 | 561 | repo = context.resource(mapping, 'repo') |
|
562 | 562 | # see commands.paths() for naming of dictionary keys |
|
563 | 563 | paths = repo.ui.paths |
|
564 | 564 | urls = util.sortdict((k, p.rawloc) for k, p in sorted(paths.iteritems())) |
|
565 | 565 | def makemap(k): |
|
566 | 566 | p = paths[k] |
|
567 | 567 | d = {'name': k, 'url': p.rawloc} |
|
568 | 568 | d.update((o, v) for o, v in sorted(p.suboptions.iteritems())) |
|
569 | 569 | return d |
|
570 | 570 | return _hybrid(None, urls, makemap, lambda k: '%s=%s' % (k, urls[k])) |
|
571 | 571 | |
|
572 | 572 | @templatekeyword("predecessors", requires={'repo', 'ctx'}) |
|
573 | 573 | def showpredecessors(context, mapping): |
|
574 | 574 | """Returns the list if the closest visible successors. (EXPERIMENTAL)""" |
|
575 | 575 | repo = context.resource(mapping, 'repo') |
|
576 | 576 | ctx = context.resource(mapping, 'ctx') |
|
577 | 577 | predecessors = sorted(obsutil.closestpredecessors(repo, ctx.node())) |
|
578 | 578 | predecessors = map(hex, predecessors) |
|
579 | 579 | |
|
580 | 580 | return _hybrid(None, predecessors, |
|
581 | 581 | lambda x: {'ctx': repo[x], 'revcache': {}}, |
|
582 | 582 | lambda x: scmutil.formatchangeid(repo[x])) |
|
583 | 583 | |
|
584 | 584 | @templatekeyword('reporoot', requires={'repo'}) |
|
585 | 585 | def showreporoot(context, mapping): |
|
586 | 586 | """String. The root directory of the current repository.""" |
|
587 | 587 | repo = context.resource(mapping, 'repo') |
|
588 | 588 | return repo.root |
|
589 | 589 | |
|
590 | 590 | @templatekeyword("successorssets", requires={'repo', 'ctx'}) |
|
591 | 591 | def showsuccessorssets(context, mapping): |
|
592 | 592 | """Returns a string of sets of successors for a changectx. Format used |
|
593 | 593 | is: [ctx1, ctx2], [ctx3] if ctx has been split into ctx1 and ctx2 |
|
594 | 594 | while also diverged into ctx3. (EXPERIMENTAL)""" |
|
595 | 595 | repo = context.resource(mapping, 'repo') |
|
596 | 596 | ctx = context.resource(mapping, 'ctx') |
|
597 | 597 | if not ctx.obsolete(): |
|
598 | 598 | return '' |
|
599 | 599 | |
|
600 | 600 | ssets = obsutil.successorssets(repo, ctx.node(), closest=True) |
|
601 | 601 | ssets = [[hex(n) for n in ss] for ss in ssets] |
|
602 | 602 | |
|
603 | 603 | data = [] |
|
604 | 604 | for ss in ssets: |
|
605 | 605 | h = _hybrid(None, ss, lambda x: {'ctx': repo[x], 'revcache': {}}, |
|
606 | 606 | lambda x: scmutil.formatchangeid(repo[x])) |
|
607 | 607 | data.append(h) |
|
608 | 608 | |
|
609 | 609 | # Format the successorssets |
|
610 | 610 | def render(d): |
|
611 | 611 | t = [] |
|
612 | 612 | for i in d.gen(): |
|
613 | 613 | t.append(i) |
|
614 | 614 | return "".join(t) |
|
615 | 615 | |
|
616 | 616 | def gen(data): |
|
617 | 617 | yield "; ".join(render(d) for d in data) |
|
618 | 618 | |
|
619 | 619 | return _hybrid(gen(data), data, lambda x: {'successorset': x}, |
|
620 | 620 | pycompat.identity) |
|
621 | 621 | |
|
622 | 622 | @templatekeyword("succsandmarkers", requires={'repo', 'ctx', 'templ'}) |
|
623 | 623 | def showsuccsandmarkers(context, mapping): |
|
624 | 624 | """Returns a list of dict for each final successor of ctx. The dict |
|
625 | 625 | contains successors node id in "successors" keys and the list of |
|
626 | 626 | obs-markers from ctx to the set of successors in "markers". |
|
627 | 627 | (EXPERIMENTAL) |
|
628 | 628 | """ |
|
629 | 629 | repo = context.resource(mapping, 'repo') |
|
630 | 630 | ctx = context.resource(mapping, 'ctx') |
|
631 | 631 | templ = context.resource(mapping, 'templ') |
|
632 | 632 | |
|
633 | 633 | values = obsutil.successorsandmarkers(repo, ctx) |
|
634 | 634 | |
|
635 | 635 | if values is None: |
|
636 | 636 | values = [] |
|
637 | 637 | |
|
638 | 638 | # Format successors and markers to avoid exposing binary to templates |
|
639 | 639 | data = [] |
|
640 | 640 | for i in values: |
|
641 | 641 | # Format successors |
|
642 | 642 | successors = i['successors'] |
|
643 | 643 | |
|
644 | 644 | successors = [hex(n) for n in successors] |
|
645 | 645 | successors = _hybrid(None, successors, |
|
646 | 646 | lambda x: {'ctx': repo[x], 'revcache': {}}, |
|
647 | 647 | lambda x: scmutil.formatchangeid(repo[x])) |
|
648 | 648 | |
|
649 | 649 | # Format markers |
|
650 | 650 | finalmarkers = [] |
|
651 | 651 | for m in i['markers']: |
|
652 | 652 | hexprec = hex(m[0]) |
|
653 | 653 | hexsucs = tuple(hex(n) for n in m[1]) |
|
654 | 654 | hexparents = None |
|
655 | 655 | if m[5] is not None: |
|
656 | 656 | hexparents = tuple(hex(n) for n in m[5]) |
|
657 | 657 | newmarker = (hexprec, hexsucs) + m[2:5] + (hexparents,) + m[6:] |
|
658 | 658 | finalmarkers.append(newmarker) |
|
659 | 659 | |
|
660 | 660 | data.append({'successors': successors, 'markers': finalmarkers}) |
|
661 | 661 | |
|
662 | 662 | f = _showlist('succsandmarkers', data, templ, mapping) |
|
663 | 663 | return _hybrid(f, data, lambda x: x, pycompat.identity) |
|
664 | 664 | |
|
665 | 665 | @templatekeyword('p1rev', requires={'ctx'}) |
|
666 | 666 | def showp1rev(context, mapping): |
|
667 | 667 | """Integer. The repository-local revision number of the changeset's |
|
668 | 668 | first parent, or -1 if the changeset has no parents.""" |
|
669 | 669 | ctx = context.resource(mapping, 'ctx') |
|
670 | 670 | return ctx.p1().rev() |
|
671 | 671 | |
|
672 | 672 | @templatekeyword('p2rev', requires={'ctx'}) |
|
673 | 673 | def showp2rev(context, mapping): |
|
674 | 674 | """Integer. The repository-local revision number of the changeset's |
|
675 | 675 | second parent, or -1 if the changeset has no second parent.""" |
|
676 | 676 | ctx = context.resource(mapping, 'ctx') |
|
677 | 677 | return ctx.p2().rev() |
|
678 | 678 | |
|
679 | 679 | @templatekeyword('p1node', requires={'ctx'}) |
|
680 | 680 | def showp1node(context, mapping): |
|
681 | 681 | """String. The identification hash of the changeset's first parent, |
|
682 | 682 | as a 40 digit hexadecimal string. If the changeset has no parents, all |
|
683 | 683 | digits are 0.""" |
|
684 | 684 | ctx = context.resource(mapping, 'ctx') |
|
685 | 685 | return ctx.p1().hex() |
|
686 | 686 | |
|
687 | 687 | @templatekeyword('p2node', requires={'ctx'}) |
|
688 | 688 | def showp2node(context, mapping): |
|
689 | 689 | """String. The identification hash of the changeset's second |
|
690 | 690 | parent, as a 40 digit hexadecimal string. If the changeset has no second |
|
691 | 691 | parent, all digits are 0.""" |
|
692 | 692 | ctx = context.resource(mapping, 'ctx') |
|
693 | 693 | return ctx.p2().hex() |
|
694 | 694 | |
|
695 | 695 | @templatekeyword('parents', requires={'repo', 'ctx', 'templ'}) |
|
696 | 696 | def showparents(context, mapping): |
|
697 | 697 | """List of strings. The parents of the changeset in "rev:node" |
|
698 | 698 | format. If the changeset has only one "natural" parent (the predecessor |
|
699 | 699 | revision) nothing is shown.""" |
|
700 | 700 | repo = context.resource(mapping, 'repo') |
|
701 | 701 | ctx = context.resource(mapping, 'ctx') |
|
702 | 702 | templ = context.resource(mapping, 'templ') |
|
703 | 703 | pctxs = scmutil.meaningfulparents(repo, ctx) |
|
704 | 704 | prevs = [p.rev() for p in pctxs] |
|
705 | 705 | parents = [[('rev', p.rev()), |
|
706 | 706 | ('node', p.hex()), |
|
707 | 707 | ('phase', p.phasestr())] |
|
708 | 708 | for p in pctxs] |
|
709 | 709 | f = _showlist('parent', parents, templ, mapping) |
|
710 | 710 | return _hybrid(f, prevs, lambda x: {'ctx': repo[x], 'revcache': {}}, |
|
711 | 711 | lambda x: scmutil.formatchangeid(repo[x]), keytype=int) |
|
712 | 712 | |
|
713 | 713 | @templatekeyword('phase', requires={'ctx'}) |
|
714 | 714 | def showphase(context, mapping): |
|
715 | 715 | """String. The changeset phase name.""" |
|
716 | 716 | ctx = context.resource(mapping, 'ctx') |
|
717 | 717 | return ctx.phasestr() |
|
718 | 718 | |
|
719 | 719 | @templatekeyword('phaseidx', requires={'ctx'}) |
|
720 | 720 | def showphaseidx(context, mapping): |
|
721 | 721 | """Integer. The changeset phase index. (ADVANCED)""" |
|
722 | 722 | ctx = context.resource(mapping, 'ctx') |
|
723 | 723 | return ctx.phase() |
|
724 | 724 | |
|
725 | 725 | @templatekeyword('rev', requires={'ctx'}) |
|
726 | 726 | def showrev(context, mapping): |
|
727 | 727 | """Integer. The repository-local changeset revision number.""" |
|
728 | 728 | ctx = context.resource(mapping, 'ctx') |
|
729 | 729 | return scmutil.intrev(ctx) |
|
730 | 730 | |
|
731 | 731 | def showrevslist(context, mapping, name, revs): |
|
732 | 732 | """helper to generate a list of revisions in which a mapped template will |
|
733 | 733 | be evaluated""" |
|
734 | 734 | repo = context.resource(mapping, 'repo') |
|
735 | 735 | templ = context.resource(mapping, 'templ') |
|
736 | 736 | f = _showlist(name, ['%d' % r for r in revs], templ, mapping) |
|
737 | 737 | return _hybrid(f, revs, |
|
738 | 738 | lambda x: {name: x, 'ctx': repo[x], 'revcache': {}}, |
|
739 | 739 | pycompat.identity, keytype=int) |
|
740 | 740 | |
|
741 | 741 | @templatekeyword('subrepos', requires={'ctx', 'templ'}) |
|
742 | 742 | def showsubrepos(context, mapping): |
|
743 | 743 | """List of strings. Updated subrepositories in the changeset.""" |
|
744 | 744 | ctx = context.resource(mapping, 'ctx') |
|
745 | 745 | substate = ctx.substate |
|
746 | 746 | if not substate: |
|
747 | 747 | return compatlist(context, mapping, 'subrepo', []) |
|
748 | 748 | psubstate = ctx.parents()[0].substate or {} |
|
749 | 749 | subrepos = [] |
|
750 | 750 | for sub in substate: |
|
751 | 751 | if sub not in psubstate or substate[sub] != psubstate[sub]: |
|
752 | 752 | subrepos.append(sub) # modified or newly added in ctx |
|
753 | 753 | for sub in psubstate: |
|
754 | 754 | if sub not in substate: |
|
755 | 755 | subrepos.append(sub) # removed in ctx |
|
756 | 756 | return compatlist(context, mapping, 'subrepo', sorted(subrepos)) |
|
757 | 757 | |
|
758 | 758 | # don't remove "showtags" definition, even though namespaces will put |
|
759 | 759 | # a helper function for "tags" keyword into "keywords" map automatically, |
|
760 | 760 | # because online help text is built without namespaces initialization |
|
761 | 761 | @templatekeyword('tags', requires={'repo', 'ctx', 'templ'}) |
|
762 | 762 | def showtags(context, mapping): |
|
763 | 763 | """List of strings. Any tags associated with the changeset.""" |
|
764 | 764 | return shownames(context, mapping, 'tags') |
|
765 | 765 | |
|
766 | 766 | @templatekeyword('termwidth', requires={'ui'}) |
|
767 | 767 | def showtermwidth(context, mapping): |
|
768 | 768 | """Integer. The width of the current terminal.""" |
|
769 | 769 | ui = context.resource(mapping, 'ui') |
|
770 | 770 | return ui.termwidth() |
|
771 | 771 | |
|
772 | 772 | @templatekeyword('instabilities', requires={'ctx', 'templ'}) |
|
773 | 773 | def showinstabilities(context, mapping): |
|
774 | 774 | """List of strings. Evolution instabilities affecting the changeset. |
|
775 | 775 | (EXPERIMENTAL) |
|
776 | 776 | """ |
|
777 | 777 | ctx = context.resource(mapping, 'ctx') |
|
778 | 778 | return compatlist(context, mapping, 'instability', ctx.instabilities(), |
|
779 | 779 | plural='instabilities') |
|
780 | 780 | |
|
781 | 781 | @templatekeyword('verbosity', requires={'ui'}) |
|
782 | 782 | def showverbosity(context, mapping): |
|
783 | 783 | """String. The current output verbosity in 'debug', 'quiet', 'verbose', |
|
784 | 784 | or ''.""" |
|
785 | 785 | ui = context.resource(mapping, 'ui') |
|
786 | 786 | # see logcmdutil.changesettemplater for priority of these flags |
|
787 | 787 | if ui.debugflag: |
|
788 | 788 | return 'debug' |
|
789 | 789 | elif ui.quiet: |
|
790 | 790 | return 'quiet' |
|
791 | 791 | elif ui.verbose: |
|
792 | 792 | return 'verbose' |
|
793 | 793 | return '' |
|
794 | 794 | |
|
795 | 795 | def loadkeyword(ui, extname, registrarobj): |
|
796 | 796 | """Load template keyword from specified registrarobj |
|
797 | 797 | """ |
|
798 | 798 | for name, func in registrarobj._table.iteritems(): |
|
799 | 799 | keywords[name] = func |
|
800 | 800 | |
|
801 | 801 | # tell hggettext to extract docstrings from these functions: |
|
802 | 802 | i18nfunctions = keywords.values() |
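The loadkeyword() helper above is what merges an extension's registrar table into the global keywords map, so third-party keywords behave like the built-ins in this file. A minimal, hypothetical sketch of such an extension follows; the keyword name 'shortnode' and the module are made up and not part of this change:

    # myext.py - hypothetical extension adding a template keyword
    from mercurial import registrar

    templatekeyword = registrar.templatekeyword()

    @templatekeyword('shortnode', requires={'ctx'})
    def showshortnode(context, mapping):
        """String. The first 12 hex digits of the changeset hash."""
        ctx = context.resource(mapping, 'ctx')
        return ctx.hex()[:12]

With the extension enabled, {shortnode} resolves through the same new-style (context, mapping) calling convention used by the keywords shown above.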
@@ -1,842 +1,842 b'' | |||
|
1 | 1 | # templater.py - template expansion for output |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | """Slightly complicated template engine for commands and hgweb |
|
9 | 9 | |
|
10 | 10 | This module provides low-level interface to the template engine. See the |
|
11 | 11 | formatter and cmdutil modules if you are looking for high-level functions |
|
12 | 12 | such as ``cmdutil.rendertemplate(ctx, tmpl)``. |
|
13 | 13 | |
|
14 | 14 | Internal Data Types |
|
15 | 15 | ------------------- |
|
16 | 16 | |
|
17 | 17 | Template keywords and functions take a dictionary of current symbols and |
|
18 | 18 | resources (a "mapping") and return result. Inputs and outputs must be one |
|
19 | 19 | of the following data types: |
|
20 | 20 | |
|
21 | 21 | bytes |
|
22 | 22 | a byte string, which is generally a human-readable text in local encoding. |
|
23 | 23 | |
|
24 | 24 | generator |
|
25 | 25 | a lazily-evaluated byte string, which is a possibly nested generator of |
|
26 | 26 | values of any printable types, and will be folded by ``stringify()`` |
|
27 | 27 | or ``flatten()``. |
|
28 | 28 | |
|
29 | 29 | BUG: hgweb overloads this type for mappings (i.e. some hgweb keywords |
|
30 | 30 | returns a generator of dicts.) |
|
31 | 31 | |
|
32 | 32 | None |
|
33 | 33 | sometimes represents an empty value, which can be stringified to ''. |
|
34 | 34 | |
|
35 | 35 | True, False, int, float |
|
36 | 36 | can be stringified as such. |
|
37 | 37 | |
|
38 | 38 | date tuple |
|
39 | 39 | a (unixtime, offset) tuple, which produces no meaningful output by itself. |
|
40 | 40 | |
|
41 | 41 | hybrid |
|
42 | 42 | represents a list/dict of printable values, which can also be converted |
|
43 | 43 | to mappings by % operator. |
|
44 | 44 | |
|
45 | 45 | mappable |
|
46 | 46 | represents a scalar printable value, also supports % operator. |
|
47 | 47 | """ |
|
48 | 48 | |
|
49 | 49 | from __future__ import absolute_import, print_function |
|
50 | 50 | |
|
51 | 51 | import os |
|
52 | 52 | |
|
53 | 53 | from .i18n import _ |
|
54 | 54 | from . import ( |
|
55 | 55 | config, |
|
56 | 56 | encoding, |
|
57 | 57 | error, |
|
58 | 58 | parser, |
|
59 | 59 | pycompat, |
|
60 | 60 | templatefilters, |
|
61 | 61 | templatefuncs, |
|
62 | 62 | templateutil, |
|
63 | 63 | util, |
|
64 | 64 | ) |
|
65 | 65 | |
|
66 | 66 | # template parsing |
|
67 | 67 | |
|
68 | 68 | elements = { |
|
69 | 69 | # token-type: binding-strength, primary, prefix, infix, suffix |
|
70 | 70 | "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None), |
|
71 | 71 | ".": (18, None, None, (".", 18), None), |
|
72 | 72 | "%": (15, None, None, ("%", 15), None), |
|
73 | 73 | "|": (15, None, None, ("|", 15), None), |
|
74 | 74 | "*": (5, None, None, ("*", 5), None), |
|
75 | 75 | "/": (5, None, None, ("/", 5), None), |
|
76 | 76 | "+": (4, None, None, ("+", 4), None), |
|
77 | 77 | "-": (4, None, ("negate", 19), ("-", 4), None), |
|
78 | 78 | "=": (3, None, None, ("keyvalue", 3), None), |
|
79 | 79 | ",": (2, None, None, ("list", 2), None), |
|
80 | 80 | ")": (0, None, None, None, None), |
|
81 | 81 | "integer": (0, "integer", None, None, None), |
|
82 | 82 | "symbol": (0, "symbol", None, None, None), |
|
83 | 83 | "string": (0, "string", None, None, None), |
|
84 | 84 | "template": (0, "template", None, None, None), |
|
85 | 85 | "end": (0, None, None, None, None), |
|
86 | 86 | } |
|
87 | 87 | |
|
88 | 88 | def tokenize(program, start, end, term=None): |
|
89 | 89 | """Parse a template expression into a stream of tokens, which must end |
|
90 | 90 | with term if specified""" |
|
91 | 91 | pos = start |
|
92 | 92 | program = pycompat.bytestr(program) |
|
93 | 93 | while pos < end: |
|
94 | 94 | c = program[pos] |
|
95 | 95 | if c.isspace(): # skip inter-token whitespace |
|
96 | 96 | pass |
|
97 | 97 | elif c in "(=,).%|+-*/": # handle simple operators |
|
98 | 98 | yield (c, None, pos) |
|
99 | 99 | elif c in '"\'': # handle quoted templates |
|
100 | 100 | s = pos + 1 |
|
101 | 101 | data, pos = _parsetemplate(program, s, end, c) |
|
102 | 102 | yield ('template', data, s) |
|
103 | 103 | pos -= 1 |
|
104 | 104 | elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'): |
|
105 | 105 | # handle quoted strings |
|
106 | 106 | c = program[pos + 1] |
|
107 | 107 | s = pos = pos + 2 |
|
108 | 108 | while pos < end: # find closing quote |
|
109 | 109 | d = program[pos] |
|
110 | 110 | if d == '\\': # skip over escaped characters |
|
111 | 111 | pos += 2 |
|
112 | 112 | continue |
|
113 | 113 | if d == c: |
|
114 | 114 | yield ('string', program[s:pos], s) |
|
115 | 115 | break |
|
116 | 116 | pos += 1 |
|
117 | 117 | else: |
|
118 | 118 | raise error.ParseError(_("unterminated string"), s) |
|
119 | 119 | elif c.isdigit(): |
|
120 | 120 | s = pos |
|
121 | 121 | while pos < end: |
|
122 | 122 | d = program[pos] |
|
123 | 123 | if not d.isdigit(): |
|
124 | 124 | break |
|
125 | 125 | pos += 1 |
|
126 | 126 | yield ('integer', program[s:pos], s) |
|
127 | 127 | pos -= 1 |
|
128 | 128 | elif (c == '\\' and program[pos:pos + 2] in (br"\'", br'\"') |
|
129 | 129 | or c == 'r' and program[pos:pos + 3] in (br"r\'", br'r\"')): |
|
130 | 130 | # handle escaped quoted strings for compatibility with 2.9.2-3.4, |
|
131 | 131 | # where some of nested templates were preprocessed as strings and |
|
132 | 132 | # then compiled. therefore, \"...\" was allowed. (issue4733) |
|
133 | 133 | # |
|
134 | 134 | # processing flow of _evalifliteral() at 5ab28a2e9962: |
|
135 | 135 | # outer template string -> stringify() -> compiletemplate() |
|
136 | 136 | # ------------------------ ------------ ------------------ |
|
137 | 137 | # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}] |
|
138 | 138 | # ~~~~~~~~ |
|
139 | 139 | # escaped quoted string |
|
140 | 140 | if c == 'r': |
|
141 | 141 | pos += 1 |
|
142 | 142 | token = 'string' |
|
143 | 143 | else: |
|
144 | 144 | token = 'template' |
|
145 | 145 | quote = program[pos:pos + 2] |
|
146 | 146 | s = pos = pos + 2 |
|
147 | 147 | while pos < end: # find closing escaped quote |
|
148 | 148 | if program.startswith('\\\\\\', pos, end): |
|
149 | 149 | pos += 4 # skip over double escaped characters |
|
150 | 150 | continue |
|
151 | 151 | if program.startswith(quote, pos, end): |
|
152 | 152 | # interpret as if it were a part of an outer string |
|
153 | 153 | data = parser.unescapestr(program[s:pos]) |
|
154 | 154 | if token == 'template': |
|
155 | 155 | data = _parsetemplate(data, 0, len(data))[0] |
|
156 | 156 | yield (token, data, s) |
|
157 | 157 | pos += 1 |
|
158 | 158 | break |
|
159 | 159 | pos += 1 |
|
160 | 160 | else: |
|
161 | 161 | raise error.ParseError(_("unterminated string"), s) |
|
162 | 162 | elif c.isalnum() or c in '_': |
|
163 | 163 | s = pos |
|
164 | 164 | pos += 1 |
|
165 | 165 | while pos < end: # find end of symbol |
|
166 | 166 | d = program[pos] |
|
167 | 167 | if not (d.isalnum() or d == "_"): |
|
168 | 168 | break |
|
169 | 169 | pos += 1 |
|
170 | 170 | sym = program[s:pos] |
|
171 | 171 | yield ('symbol', sym, s) |
|
172 | 172 | pos -= 1 |
|
173 | 173 | elif c == term: |
|
174 | 174 | yield ('end', None, pos) |
|
175 | 175 | return |
|
176 | 176 | else: |
|
177 | 177 | raise error.ParseError(_("syntax error"), pos) |
|
178 | 178 | pos += 1 |
|
179 | 179 | if term: |
|
180 | 180 | raise error.ParseError(_("unterminated template expansion"), start) |
|
181 | 181 | yield ('end', None, pos) |
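As a quick illustration of the token stream produced above (the expression itself is arbitrary):

    from mercurial.templater import tokenize

    list(tokenize('rev|short', 0, len('rev|short')))
    # -> [('symbol', 'rev', 0), ('|', None, 3), ('symbol', 'short', 4),
    #     ('end', None, 9)]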
|
182 | 182 | |
|
183 | 183 | def _parsetemplate(tmpl, start, stop, quote=''): |
|
184 | 184 | r""" |
|
185 | 185 | >>> _parsetemplate(b'foo{bar}"baz', 0, 12) |
|
186 | 186 | ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12) |
|
187 | 187 | >>> _parsetemplate(b'foo{bar}"baz', 0, 12, quote=b'"') |
|
188 | 188 | ([('string', 'foo'), ('symbol', 'bar')], 9) |
|
189 | 189 | >>> _parsetemplate(b'foo"{bar}', 0, 9, quote=b'"') |
|
190 | 190 | ([('string', 'foo')], 4) |
|
191 | 191 | >>> _parsetemplate(br'foo\"bar"baz', 0, 12, quote=b'"') |
|
192 | 192 | ([('string', 'foo"'), ('string', 'bar')], 9) |
|
193 | 193 | >>> _parsetemplate(br'foo\\"bar', 0, 10, quote=b'"') |
|
194 | 194 | ([('string', 'foo\\')], 6) |
|
195 | 195 | """ |
|
196 | 196 | parsed = [] |
|
197 | 197 | for typ, val, pos in _scantemplate(tmpl, start, stop, quote): |
|
198 | 198 | if typ == 'string': |
|
199 | 199 | parsed.append((typ, val)) |
|
200 | 200 | elif typ == 'template': |
|
201 | 201 | parsed.append(val) |
|
202 | 202 | elif typ == 'end': |
|
203 | 203 | return parsed, pos |
|
204 | 204 | else: |
|
205 | 205 | raise error.ProgrammingError('unexpected type: %s' % typ) |
|
206 | 206 | raise error.ProgrammingError('unterminated scanning of template') |
|
207 | 207 | |
|
208 | 208 | def scantemplate(tmpl, raw=False): |
|
209 | 209 | r"""Scan (type, start, end) positions of outermost elements in template |
|
210 | 210 | |
|
211 | 211 | If raw=True, a backslash is not taken as an escape character just like |
|
212 | 212 | r'' string in Python. Note that this is different from r'' literal in |
|
213 | 213 | template in that no template fragment can appear in r'', e.g. r'{foo}' |
|
214 | 214 | is a literal '{foo}', but ('{foo}', raw=True) is a template expression |
|
215 | 215 | 'foo'. |
|
216 | 216 | |
|
217 | 217 | >>> list(scantemplate(b'foo{bar}"baz')) |
|
218 | 218 | [('string', 0, 3), ('template', 3, 8), ('string', 8, 12)] |
|
219 | 219 | >>> list(scantemplate(b'outer{"inner"}outer')) |
|
220 | 220 | [('string', 0, 5), ('template', 5, 14), ('string', 14, 19)] |
|
221 | 221 | >>> list(scantemplate(b'foo\\{escaped}')) |
|
222 | 222 | [('string', 0, 5), ('string', 5, 13)] |
|
223 | 223 | >>> list(scantemplate(b'foo\\{escaped}', raw=True)) |
|
224 | 224 | [('string', 0, 4), ('template', 4, 13)] |
|
225 | 225 | """ |
|
226 | 226 | last = None |
|
227 | 227 | for typ, val, pos in _scantemplate(tmpl, 0, len(tmpl), raw=raw): |
|
228 | 228 | if last: |
|
229 | 229 | yield last + (pos,) |
|
230 | 230 | if typ == 'end': |
|
231 | 231 | return |
|
232 | 232 | else: |
|
233 | 233 | last = (typ, pos) |
|
234 | 234 | raise error.ProgrammingError('unterminated scanning of template') |
|
235 | 235 | |
|
236 | 236 | def _scantemplate(tmpl, start, stop, quote='', raw=False): |
|
237 | 237 | """Parse template string into chunks of strings and template expressions""" |
|
238 | 238 | sepchars = '{' + quote |
|
239 | 239 | unescape = [parser.unescapestr, pycompat.identity][raw] |
|
240 | 240 | pos = start |
|
241 | 241 | p = parser.parser(elements) |
|
242 | 242 | try: |
|
243 | 243 | while pos < stop: |
|
244 | 244 | n = min((tmpl.find(c, pos, stop) for c in sepchars), |
|
245 | 245 | key=lambda n: (n < 0, n)) |
|
246 | 246 | if n < 0: |
|
247 | 247 | yield ('string', unescape(tmpl[pos:stop]), pos) |
|
248 | 248 | pos = stop |
|
249 | 249 | break |
|
250 | 250 | c = tmpl[n:n + 1] |
|
251 | 251 | bs = 0 # count leading backslashes |
|
252 | 252 | if not raw: |
|
253 | 253 | bs = (n - pos) - len(tmpl[pos:n].rstrip('\\')) |
|
254 | 254 | if bs % 2 == 1: |
|
255 | 255 | # escaped (e.g. '\{', '\\\{', but not '\\{') |
|
256 | 256 | yield ('string', unescape(tmpl[pos:n - 1]) + c, pos) |
|
257 | 257 | pos = n + 1 |
|
258 | 258 | continue |
|
259 | 259 | if n > pos: |
|
260 | 260 | yield ('string', unescape(tmpl[pos:n]), pos) |
|
261 | 261 | if c == quote: |
|
262 | 262 | yield ('end', None, n + 1) |
|
263 | 263 | return |
|
264 | 264 | |
|
265 | 265 | parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}')) |
|
266 | 266 | if not tmpl.startswith('}', pos): |
|
267 | 267 | raise error.ParseError(_("invalid token"), pos) |
|
268 | 268 | yield ('template', parseres, n) |
|
269 | 269 | pos += 1 |
|
270 | 270 | |
|
271 | 271 | if quote: |
|
272 | 272 | raise error.ParseError(_("unterminated string"), start) |
|
273 | 273 | except error.ParseError as inst: |
|
274 | 274 | if len(inst.args) > 1: # has location |
|
275 | 275 | loc = inst.args[1] |
|
276 | 276 | # Offset the caret location by the number of newlines before the |
|
277 | 277 | # location of the error, since we will replace one-char newlines |
|
278 | 278 | # with the two-char literal r'\n'. |
|
279 | 279 | offset = tmpl[:loc].count('\n') |
|
280 | 280 | tmpl = tmpl.replace('\n', br'\n') |
|
281 | 281 | # We want the caret to point to the place in the template that |
|
282 | 282 | # failed to parse, but in a hint we get a open paren at the |
|
283 | 283 | # start. Therefore, we print "loc + 1" spaces (instead of "loc") |
|
284 | 284 | # to line up the caret with the location of the error. |
|
285 | 285 | inst.hint = (tmpl + '\n' |
|
286 | 286 | + ' ' * (loc + 1 + offset) + '^ ' + _('here')) |
|
287 | 287 | raise |
|
288 | 288 | yield ('end', None, pos) |
|
289 | 289 | |
|
290 | 290 | def _unnesttemplatelist(tree): |
|
291 | 291 | """Expand list of templates to node tuple |
|
292 | 292 | |
|
293 | 293 | >>> def f(tree): |
|
294 | 294 | ... print(pycompat.sysstr(prettyformat(_unnesttemplatelist(tree)))) |
|
295 | 295 | >>> f((b'template', [])) |
|
296 | 296 | (string '') |
|
297 | 297 | >>> f((b'template', [(b'string', b'foo')])) |
|
298 | 298 | (string 'foo') |
|
299 | 299 | >>> f((b'template', [(b'string', b'foo'), (b'symbol', b'rev')])) |
|
300 | 300 | (template |
|
301 | 301 | (string 'foo') |
|
302 | 302 | (symbol 'rev')) |
|
303 | 303 | >>> f((b'template', [(b'symbol', b'rev')])) # template(rev) -> str |
|
304 | 304 | (template |
|
305 | 305 | (symbol 'rev')) |
|
306 | 306 | >>> f((b'template', [(b'template', [(b'string', b'foo')])])) |
|
307 | 307 | (string 'foo') |
|
308 | 308 | """ |
|
309 | 309 | if not isinstance(tree, tuple): |
|
310 | 310 | return tree |
|
311 | 311 | op = tree[0] |
|
312 | 312 | if op != 'template': |
|
313 | 313 | return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:]) |
|
314 | 314 | |
|
315 | 315 | assert len(tree) == 2 |
|
316 | 316 | xs = tuple(_unnesttemplatelist(x) for x in tree[1]) |
|
317 | 317 | if not xs: |
|
318 | 318 | return ('string', '') # empty template "" |
|
319 | 319 | elif len(xs) == 1 and xs[0][0] == 'string': |
|
320 | 320 | return xs[0] # fast path for string with no template fragment "x" |
|
321 | 321 | else: |
|
322 | 322 | return (op,) + xs |
|
323 | 323 | |
|
324 | 324 | def parse(tmpl): |
|
325 | 325 | """Parse template string into tree""" |
|
326 | 326 | parsed, pos = _parsetemplate(tmpl, 0, len(tmpl)) |
|
327 | 327 | assert pos == len(tmpl), 'unquoted template should be consumed' |
|
328 | 328 | return _unnesttemplatelist(('template', parsed)) |
|
329 | 329 | |
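A rough sketch of what parse() returns for an ordinary log template (the tree shape is shown as a comment; the template itself is arbitrary):

    from mercurial import templater

    tree = templater.parse('{rev}:{desc|firstline}\n')
    # roughly:
    # ('template',
    #  ('symbol', 'rev'),
    #  ('string', ':'),
    #  ('|', ('symbol', 'desc'), ('symbol', 'firstline')),
    #  ('string', '\n'))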
|
330 | 330 | def _parseexpr(expr): |
|
331 | 331 | """Parse a template expression into tree |
|
332 | 332 | |
|
333 | 333 | >>> _parseexpr(b'"foo"') |
|
334 | 334 | ('string', 'foo') |
|
335 | 335 | >>> _parseexpr(b'foo(bar)') |
|
336 | 336 | ('func', ('symbol', 'foo'), ('symbol', 'bar')) |
|
337 | 337 | >>> _parseexpr(b'foo(') |
|
338 | 338 | Traceback (most recent call last): |
|
339 | 339 | ... |
|
340 | 340 | ParseError: ('not a prefix: end', 4) |
|
341 | 341 | >>> _parseexpr(b'"foo" "bar"') |
|
342 | 342 | Traceback (most recent call last): |
|
343 | 343 | ... |
|
344 | 344 | ParseError: ('invalid token', 7) |
|
345 | 345 | """ |
|
346 | 346 | p = parser.parser(elements) |
|
347 | 347 | tree, pos = p.parse(tokenize(expr, 0, len(expr))) |
|
348 | 348 | if pos != len(expr): |
|
349 | 349 | raise error.ParseError(_('invalid token'), pos) |
|
350 | 350 | return _unnesttemplatelist(tree) |
|
351 | 351 | |
|
352 | 352 | def prettyformat(tree): |
|
353 | 353 | return parser.prettyformat(tree, ('integer', 'string', 'symbol')) |
|
354 | 354 | |
|
355 | 355 | def compileexp(exp, context, curmethods): |
|
356 | 356 | """Compile parsed template tree to (func, data) pair""" |
|
357 | 357 | if not exp: |
|
358 | 358 | raise error.ParseError(_("missing argument")) |
|
359 | 359 | t = exp[0] |
|
360 | 360 | if t in curmethods: |
|
361 | 361 | return curmethods[t](exp, context) |
|
362 | 362 | raise error.ParseError(_("unknown method '%s'") % t) |
|
363 | 363 | |
|
364 | 364 | # template evaluation |
|
365 | 365 | |
|
366 | 366 | def getsymbol(exp): |
|
367 | 367 | if exp[0] == 'symbol': |
|
368 | 368 | return exp[1] |
|
369 | 369 | raise error.ParseError(_("expected a symbol, got '%s'") % exp[0]) |
|
370 | 370 | |
|
371 | 371 | def getlist(x): |
|
372 | 372 | if not x: |
|
373 | 373 | return [] |
|
374 | 374 | if x[0] == 'list': |
|
375 | 375 | return getlist(x[1]) + [x[2]] |
|
376 | 376 | return [x] |
|
377 | 377 | |
|
378 | 378 | def gettemplate(exp, context): |
|
379 | 379 | """Compile given template tree or load named template from map file; |
|
380 | 380 | returns (func, data) pair""" |
|
381 | 381 | if exp[0] in ('template', 'string'): |
|
382 | 382 | return compileexp(exp, context, methods) |
|
383 | 383 | if exp[0] == 'symbol': |
|
384 | 384 | # unlike runsymbol(), here 'symbol' is always taken as template name |
|
385 | 385 | # even if it exists in mapping. this allows us to override mapping |
|
386 | 386 | # by web templates, e.g. 'changelogtag' is redefined in map file. |
|
387 | 387 | return context._load(exp[1]) |
|
388 | 388 | raise error.ParseError(_("expected template specifier")) |
|
389 | 389 | |
|
390 | 390 | def _runrecursivesymbol(context, mapping, key): |
|
391 | 391 | raise error.Abort(_("recursive reference '%s' in template") % key) |
|
392 | 392 | |
|
393 | 393 | def buildtemplate(exp, context): |
|
394 | 394 | ctmpl = [compileexp(e, context, methods) for e in exp[1:]] |
|
395 | 395 | return (templateutil.runtemplate, ctmpl) |
|
396 | 396 | |
|
397 | 397 | def buildfilter(exp, context): |
|
398 | 398 | n = getsymbol(exp[2]) |
|
399 | 399 | if n in context._filters: |
|
400 | 400 | filt = context._filters[n] |
|
401 | 401 | arg = compileexp(exp[1], context, methods) |
|
402 | 402 | return (templateutil.runfilter, (arg, filt)) |
|
403 | 403 | if n in context._funcs: |
|
404 | 404 | f = context._funcs[n] |
|
405 | 405 | args = _buildfuncargs(exp[1], context, methods, n, f._argspec) |
|
406 | 406 | return (f, args) |
|
407 | 407 | raise error.ParseError(_("unknown function '%s'") % n) |
|
408 | 408 | |
|
409 | 409 | def buildmap(exp, context): |
|
410 | 410 | darg = compileexp(exp[1], context, methods) |
|
411 | 411 | targ = gettemplate(exp[2], context) |
|
412 | 412 | return (templateutil.runmap, (darg, targ)) |
|
413 | 413 | |
|
414 | 414 | def buildmember(exp, context): |
|
415 | 415 | darg = compileexp(exp[1], context, methods) |
|
416 | 416 | memb = getsymbol(exp[2]) |
|
417 | 417 | return (templateutil.runmember, (darg, memb)) |
|
418 | 418 | |
|
419 | 419 | def buildnegate(exp, context): |
|
420 | 420 | arg = compileexp(exp[1], context, exprmethods) |
|
421 | 421 | return (templateutil.runnegate, arg) |
|
422 | 422 | |
|
423 | 423 | def buildarithmetic(exp, context, func): |
|
424 | 424 | left = compileexp(exp[1], context, exprmethods) |
|
425 | 425 | right = compileexp(exp[2], context, exprmethods) |
|
426 | 426 | return (templateutil.runarithmetic, (func, left, right)) |
|
427 | 427 | |
|
428 | 428 | def buildfunc(exp, context): |
|
429 | 429 | n = getsymbol(exp[1]) |
|
430 | 430 | if n in context._funcs: |
|
431 | 431 | f = context._funcs[n] |
|
432 | 432 | args = _buildfuncargs(exp[2], context, exprmethods, n, f._argspec) |
|
433 | 433 | return (f, args) |
|
434 | 434 | if n in context._filters: |
|
435 | 435 | args = _buildfuncargs(exp[2], context, exprmethods, n, argspec=None) |
|
436 | 436 | if len(args) != 1: |
|
437 | 437 | raise error.ParseError(_("filter %s expects one argument") % n) |
|
438 | 438 | f = context._filters[n] |
|
439 | 439 | return (templateutil.runfilter, (args[0], f)) |
|
440 | 440 | raise error.ParseError(_("unknown function '%s'") % n) |
|
441 | 441 | |
|
442 | 442 | def _buildfuncargs(exp, context, curmethods, funcname, argspec): |
|
443 | 443 | """Compile parsed tree of function arguments into list or dict of |
|
444 | 444 | (func, data) pairs |
|
445 | 445 | |
|
446 | 446 | >>> context = engine(lambda t: (templateutil.runsymbol, t)) |
|
447 | 447 | >>> def fargs(expr, argspec): |
|
448 | 448 | ... x = _parseexpr(expr) |
|
449 | 449 | ... n = getsymbol(x[1]) |
|
450 | 450 | ... return _buildfuncargs(x[2], context, exprmethods, n, argspec) |
|
451 | 451 | >>> list(fargs(b'a(l=1, k=2)', b'k l m').keys()) |
|
452 | 452 | ['l', 'k'] |
|
453 | 453 | >>> args = fargs(b'a(opts=1, k=2)', b'**opts') |
|
454 | 454 | >>> list(args.keys()), list(args[b'opts'].keys()) |
|
455 | 455 | (['opts'], ['opts', 'k']) |
|
456 | 456 | """ |
|
457 | 457 | def compiledict(xs): |
|
458 | 458 | return util.sortdict((k, compileexp(x, context, curmethods)) |
|
459 | 459 | for k, x in xs.iteritems()) |
|
460 | 460 | def compilelist(xs): |
|
461 | 461 | return [compileexp(x, context, curmethods) for x in xs] |
|
462 | 462 | |
|
463 | 463 | if not argspec: |
|
464 | 464 | # filter or function with no argspec: return list of positional args |
|
465 | 465 | return compilelist(getlist(exp)) |
|
466 | 466 | |
|
467 | 467 | # function with argspec: return dict of named args |
|
468 | 468 | _poskeys, varkey, _keys, optkey = argspec = parser.splitargspec(argspec) |
|
469 | 469 | treeargs = parser.buildargsdict(getlist(exp), funcname, argspec, |
|
470 | 470 | keyvaluenode='keyvalue', keynode='symbol') |
|
471 | 471 | compargs = util.sortdict() |
|
472 | 472 | if varkey: |
|
473 | 473 | compargs[varkey] = compilelist(treeargs.pop(varkey)) |
|
474 | 474 | if optkey: |
|
475 | 475 | compargs[optkey] = compiledict(treeargs.pop(optkey)) |
|
476 | 476 | compargs.update(compiledict(treeargs)) |
|
477 | 477 | return compargs |
|
478 | 478 | |
|
479 | 479 | def buildkeyvaluepair(exp, content): |
|
480 | 480 | raise error.ParseError(_("can't use a key-value pair in this context")) |
|
481 | 481 | |
|
482 | 482 | # methods to interpret function arguments or inner expressions (e.g. {_(x)}) |
|
483 | 483 | exprmethods = { |
|
484 | 484 | "integer": lambda e, c: (templateutil.runinteger, e[1]), |
|
485 | 485 | "string": lambda e, c: (templateutil.runstring, e[1]), |
|
486 | 486 | "symbol": lambda e, c: (templateutil.runsymbol, e[1]), |
|
487 | 487 | "template": buildtemplate, |
|
488 | 488 | "group": lambda e, c: compileexp(e[1], c, exprmethods), |
|
489 | 489 | ".": buildmember, |
|
490 | 490 | "|": buildfilter, |
|
491 | 491 | "%": buildmap, |
|
492 | 492 | "func": buildfunc, |
|
493 | 493 | "keyvalue": buildkeyvaluepair, |
|
494 | 494 | "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b), |
|
495 | 495 | "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b), |
|
496 | 496 | "negate": buildnegate, |
|
497 | 497 | "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b), |
|
498 | 498 | "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b), |
|
499 | 499 | } |
|
500 | 500 | |
|
501 | 501 | # methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"}) |
|
502 | 502 | methods = exprmethods.copy() |
|
503 | 503 | methods["integer"] = exprmethods["symbol"] # '{1}' as variable |
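Because the exprmethods/methods tables above wire '+' and '|' to buildarithmetic() and buildfilter(), arithmetic and filters can be exercised end to end through the public templater class. An illustrative sketch; the 'calc' fragment is made up:

    from mercurial import templater

    t = templater.templater(cache={'calc': '{2 + 3} {"ab"|upper}'})
    t.render('calc', {})   # -> '5 AB'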
|
504 | 504 | |
|
505 | 505 | class _aliasrules(parser.basealiasrules): |
|
506 | 506 | """Parsing and expansion rule set of template aliases""" |
|
507 | 507 | _section = _('template alias') |
|
508 | 508 | _parse = staticmethod(_parseexpr) |
|
509 | 509 | |
|
510 | 510 | @staticmethod |
|
511 | 511 | def _trygetfunc(tree): |
|
512 | 512 | """Return (name, args) if tree is func(...) or ...|filter; otherwise |
|
513 | 513 | None""" |
|
514 | 514 | if tree[0] == 'func' and tree[1][0] == 'symbol': |
|
515 | 515 | return tree[1][1], getlist(tree[2]) |
|
516 | 516 | if tree[0] == '|' and tree[2][0] == 'symbol': |
|
517 | 517 | return tree[2][1], [tree[1]] |
|
518 | 518 | |
|
519 | 519 | def expandaliases(tree, aliases): |
|
520 | 520 | """Return new tree of aliases are expanded""" |
|
521 | 521 | aliasmap = _aliasrules.buildmap(aliases) |
|
522 | 522 | return _aliasrules.expand(aliasmap, tree) |
|
523 | 523 | |
|
524 | 524 | # template engine |
|
525 | 525 | |
|
526 | 526 | def _flatten(thing): |
|
527 | 527 | '''yield a single stream from a possibly nested set of iterators''' |
|
528 | 528 | thing = templateutil.unwraphybrid(thing) |
|
529 | 529 | if isinstance(thing, bytes): |
|
530 | 530 | yield thing |
|
531 | 531 | elif isinstance(thing, str): |
|
532 | 532 | # We can only hit this on Python 3, and it's here to guard |
|
533 | 533 | # against infinite recursion. |
|
534 | 534 | raise error.ProgrammingError('Mercurial IO including templates is done' |
|
535 | 535 | ' with bytes, not strings, got %r' % thing) |
|
536 | 536 | elif thing is None: |
|
537 | 537 | pass |
|
538 | 538 | elif not util.safehasattr(thing, '__iter__'): |
|
539 | 539 | yield pycompat.bytestr(thing) |
|
540 | 540 | else: |
|
541 | 541 | for i in thing: |
|
542 | 542 | i = templateutil.unwraphybrid(i) |
|
543 | 543 | if isinstance(i, bytes): |
|
544 | 544 | yield i |
|
545 | 545 | elif i is None: |
|
546 | 546 | pass |
|
547 | 547 | elif not util.safehasattr(i, '__iter__'): |
|
548 | 548 | yield pycompat.bytestr(i) |
|
549 | 549 | else: |
|
550 | 550 | for j in _flatten(i): |
|
551 | 551 | yield j |
|
552 | 552 | |
|
553 | 553 | def unquotestring(s): |
|
554 | 554 | '''unwrap quotes if any; otherwise returns unmodified string''' |
|
555 | 555 | if len(s) < 2 or s[0] not in "'\"" or s[0] != s[-1]: |
|
556 | 556 | return s |
|
557 | 557 | return s[1:-1] |
|
558 | 558 | |
|
559 | 559 | class engine(object): |
|
560 | 560 | '''template expansion engine. |
|
561 | 561 | |
|
562 | 562 | template expansion works like this. a map file contains key=value |
|
563 | 563 | pairs. if value is quoted, it is treated as string. otherwise, it |
|
564 | 564 | is treated as name of template file. |
|
565 | 565 | |
|
566 | 566 | templater is asked to expand a key in map. it looks up key, and |
|
567 | 567 | looks for strings like this: {foo}. it expands {foo} by looking up |
|
568 | 568 | foo in map, and substituting it. expansion is recursive: it stops |
|
569 | 569 | when there is no more {foo} to replace. |
|
570 | 570 | |
|
571 | 571 | expansion also allows formatting and filtering. |
|
572 | 572 | |
|
573 | 573 | format uses key to expand each item in list. syntax is |
|
574 | 574 | {key%format}. |
|
575 | 575 | |
|
576 | 576 | filter uses function to transform value. syntax is |
|
577 | 577 | {key|filter1|filter2|...}.''' |
|
578 | 578 | |
|
579 | 579 | def __init__(self, loader, filters=None, defaults=None, resources=None, |
|
580 | 580 | aliases=()): |
|
581 | 581 | self._loader = loader |
|
582 | 582 | if filters is None: |
|
583 | 583 | filters = {} |
|
584 | 584 | self._filters = filters |
|
585 | 585 | self._funcs = templatefuncs.funcs # make this a parameter if needed |
|
586 | 586 | if defaults is None: |
|
587 | 587 | defaults = {} |
|
588 | 588 | if resources is None: |
|
589 | 589 | resources = {} |
|
590 | 590 | self._defaults = defaults |
|
591 | 591 | self._resources = resources |
|
592 | 592 | self._aliasmap = _aliasrules.buildmap(aliases) |
|
593 | 593 | self._cache = {} # key: (func, data) |
|
594 | 594 | |
|
595 | 595 | def symbol(self, mapping, key): |
|
596 | 596 | """Resolve symbol to value or function; None if nothing found""" |
|
597 | 597 | v = None |
|
598 | 598 | if key not in self._resources: |
|
599 | 599 | v = mapping.get(key) |
|
600 | 600 | if v is None: |
|
601 | 601 | v = self._defaults.get(key) |
|
602 | 602 | return v |
|
603 | 603 | |
|
604 | 604 | def resource(self, mapping, key): |
|
605 | 605 | """Return internal data (e.g. cache) used for keyword/function |
|
606 | 606 | evaluation""" |
|
607 | 607 | v = None |
|
608 | 608 | if key in self._resources: |
|
609 | 609 | v = self._resources[key](self, mapping, key) |
|
610 | 610 | if v is None: |
|
611 | 611 | raise templateutil.ResourceUnavailable( |
|
612 | 612 | _('template resource not available: %s') % key) |
|
613 | 613 | return v |
|
614 | 614 | |
|
615 | 615 | def _load(self, t): |
|
616 | 616 | '''load, parse, and cache a template''' |
|
617 | 617 | if t not in self._cache: |
|
618 | 618 | # put poison to cut recursion while compiling 't' |
|
619 | 619 | self._cache[t] = (_runrecursivesymbol, t) |
|
620 | 620 | try: |
|
621 | 621 | x = parse(self._loader(t)) |
|
622 | 622 | if self._aliasmap: |
|
623 | 623 | x = _aliasrules.expand(self._aliasmap, x) |
|
624 | 624 | self._cache[t] = compileexp(x, self, methods) |
|
625 | 625 | except: # re-raises |
|
626 | 626 | del self._cache[t] |
|
627 | 627 | raise |
|
628 | 628 | return self._cache[t] |
|
629 | 629 | |
|
630 | 630 | def process(self, t, mapping): |
|
631 | 631 | '''Perform expansion. t is name of map element to expand. |
|
632 | 632 | mapping contains added elements for use during expansion. Is a |
|
633 | 633 | generator.''' |
|
634 | 634 | func, data = self._load(t) |
|
635 | 635 | return _flatten(func(self, mapping, data)) |
|
636 | 636 | |
|
637 | 637 | engines = {'default': engine} |
|
638 | 638 | |
|
639 | 639 | def stylelist(): |
|
640 | 640 | paths = templatepaths() |
|
641 | 641 | if not paths: |
|
642 | 642 | return _('no templates found, try `hg debuginstall` for more info') |
|
643 | 643 | dirlist = os.listdir(paths[0]) |
|
644 | 644 | stylelist = [] |
|
645 | 645 | for file in dirlist: |
|
646 | 646 | split = file.split(".") |
|
647 | 647 | if split[-1] in ('orig', 'rej'): |
|
648 | 648 | continue |
|
649 | 649 | if split[0] == "map-cmdline": |
|
650 | 650 | stylelist.append(split[1]) |
|
651 | 651 | return ", ".join(sorted(stylelist)) |
|
652 | 652 | |
|
653 | 653 | def _readmapfile(mapfile): |
|
654 | 654 | """Load template elements from the given map file""" |
|
655 | 655 | if not os.path.exists(mapfile): |
|
656 | 656 | raise error.Abort(_("style '%s' not found") % mapfile, |
|
657 | 657 | hint=_("available styles: %s") % stylelist()) |
|
658 | 658 | |
|
659 | 659 | base = os.path.dirname(mapfile) |
|
660 | 660 | conf = config.config(includepaths=templatepaths()) |
|
661 | 661 | conf.read(mapfile, remap={'': 'templates'}) |
|
662 | 662 | |
|
663 | 663 | cache = {} |
|
664 | 664 | tmap = {} |
|
665 | 665 | aliases = [] |
|
666 | 666 | |
|
667 | 667 | val = conf.get('templates', '__base__') |
|
668 | 668 | if val and val[0] not in "'\"": |
|
669 | 669 | # treat as a pointer to a base class for this style |
|
670 | 670 | path = util.normpath(os.path.join(base, val)) |
|
671 | 671 | |
|
672 | 672 | # fallback check in template paths |
|
673 | 673 | if not os.path.exists(path): |
|
674 | 674 | for p in templatepaths(): |
|
675 | 675 | p2 = util.normpath(os.path.join(p, val)) |
|
676 | 676 | if os.path.isfile(p2): |
|
677 | 677 | path = p2 |
|
678 | 678 | break |
|
679 | 679 | p3 = util.normpath(os.path.join(p2, "map")) |
|
680 | 680 | if os.path.isfile(p3): |
|
681 | 681 | path = p3 |
|
682 | 682 | break |
|
683 | 683 | |
|
684 | 684 | cache, tmap, aliases = _readmapfile(path) |
|
685 | 685 | |
|
686 | 686 | for key, val in conf['templates'].items(): |
|
687 | 687 | if not val: |
|
688 | 688 | raise error.ParseError(_('missing value'), |
|
689 | 689 | conf.source('templates', key)) |
|
690 | 690 | if val[0] in "'\"": |
|
691 | 691 | if val[0] != val[-1]: |
|
692 | 692 | raise error.ParseError(_('unmatched quotes'), |
|
693 | 693 | conf.source('templates', key)) |
|
694 | 694 | cache[key] = unquotestring(val) |
|
695 | 695 | elif key != '__base__': |
|
696 | 696 | val = 'default', val |
|
697 | 697 | if ':' in val[1]: |
|
698 | 698 | val = val[1].split(':', 1) |
|
699 | 699 | tmap[key] = val[0], os.path.join(base, val[1]) |
|
700 | 700 | aliases.extend(conf['templatealias'].items()) |
|
701 | 701 | return cache, tmap, aliases |
|
702 | 702 | |
|
703 | 703 | class templater(object): |
|
704 | 704 | |
|
705 | 705 | def __init__(self, filters=None, defaults=None, resources=None, |
|
706 | 706 | cache=None, aliases=(), minchunk=1024, maxchunk=65536): |
|
707 | 707 | """Create template engine optionally with preloaded template fragments |
|
708 | 708 | |
|
709 | 709 | - ``filters``: a dict of functions to transform a value into another. |
|
710 | 710 | - ``defaults``: a dict of symbol values/functions; may be overridden |
|
711 | 711 | by a ``mapping`` dict. |
|
712 | 712 | - ``resources``: a dict of functions returning internal data |
|
713 | 713 | (e.g. cache), inaccessible from user template. |
|
714 | 714 | - ``cache``: a dict of preloaded template fragments. |
|
715 | 715 | - ``aliases``: a list of alias (name, replacement) pairs. |
|
716 | 716 | |
|
717 | 717 | self.cache may be updated later to register additional template |
|
718 | 718 | fragments. |
|
719 | 719 | """ |
|
720 | 720 | if filters is None: |
|
721 | 721 | filters = {} |
|
722 | 722 | if defaults is None: |
|
723 | 723 | defaults = {} |
|
724 | 724 | if resources is None: |
|
725 | 725 | resources = {} |
|
726 | 726 | if cache is None: |
|
727 | 727 | cache = {} |
|
728 | 728 | self.cache = cache.copy() |
|
729 | 729 | self.map = {} |
|
730 | 730 | self.filters = templatefilters.filters.copy() |
|
731 | 731 | self.filters.update(filters) |
|
732 | 732 | self.defaults = defaults |
|
733 | 733 | self._resources = {'templ': lambda context, mapping, key: self} |
|
734 | 734 | self._resources.update(resources) |
|
735 | 735 | self._aliases = aliases |
|
736 | 736 | self.minchunk, self.maxchunk = minchunk, maxchunk |
|
737 | 737 | self.ecache = {} |
|
738 | 738 | |
|
739 | 739 | @classmethod |
|
740 | 740 | def frommapfile(cls, mapfile, filters=None, defaults=None, resources=None, |
|
741 | 741 | cache=None, minchunk=1024, maxchunk=65536): |
|
742 | 742 | """Create templater from the specified map file""" |
|
743 | 743 | t = cls(filters, defaults, resources, cache, [], minchunk, maxchunk) |
|
744 | 744 | cache, tmap, aliases = _readmapfile(mapfile) |
|
745 | 745 | t.cache.update(cache) |
|
746 | 746 | t.map = tmap |
|
747 | 747 | t._aliases = aliases |
|
748 | 748 | return t |
|
749 | 749 | |
|
750 | 750 | def __contains__(self, key): |
|
751 | 751 | return key in self.cache or key in self.map |
|
752 | 752 | |
|
753 | 753 | def load(self, t): |
|
754 | 754 | '''Get the template for the given template name. Use a local cache.''' |
|
755 | 755 | if t not in self.cache: |
|
756 | 756 | try: |
|
757 | 757 | self.cache[t] = util.readfile(self.map[t][1]) |
|
758 | 758 | except KeyError as inst: |
|
759 | 759 | raise templateutil.TemplateNotFound( |
|
760 | 760 | _('"%s" not in template map') % inst.args[0]) |
|
761 | 761 | except IOError as inst: |
|
762 | 762 | reason = (_('template file %s: %s') |
|
763 | 763 | % (self.map[t][1], util.forcebytestr(inst.args[1]))) |
|
764 | 764 | raise IOError(inst.args[0], encoding.strfromlocal(reason)) |
|
765 | 765 | return self.cache[t] |
|
766 | 766 | |
|
767 | 767 | def renderdefault(self, mapping): |
|
768 | 768 | """Render the default unnamed template and return result as string""" |
|
769 | 769 | return self.render('', mapping) |
|
770 | 770 | |
|
771 | 771 | def render(self, t, mapping): |
|
772 | 772 | """Render the specified named template and return result as string""" |
|
773 | mapping = pycompat.strkwargs(mapping) | |
|
774 | return templateutil.stringify(self(t, **mapping)) | |
|
773 | return templateutil.stringify(self.generate(t, mapping)) | |
|
775 | 774 | |
|
776 | def __call__(self, t, **mapping): | |

777 | mapping = pycompat.byteskwargs(mapping) | |
|
775 | def generate(self, t, mapping): | |
|
776 | """Return a generator that renders the specified named template and | |
|
777 | yields chunks""" | |
|
778 | 778 | ttype = t in self.map and self.map[t][0] or 'default' |
|
779 | 779 | if ttype not in self.ecache: |
|
780 | 780 | try: |
|
781 | 781 | ecls = engines[ttype] |
|
782 | 782 | except KeyError: |
|
783 | 783 | raise error.Abort(_('invalid template engine: %s') % ttype) |
|
784 | 784 | self.ecache[ttype] = ecls(self.load, self.filters, self.defaults, |
|
785 | 785 | self._resources, self._aliases) |
|
786 | 786 | proc = self.ecache[ttype] |
|
787 | 787 | |
|
788 | 788 | stream = proc.process(t, mapping) |
|
789 | 789 | if self.minchunk: |
|
790 | 790 | stream = util.increasingchunks(stream, min=self.minchunk, |
|
791 | 791 | max=self.maxchunk) |
|
792 | 792 | return stream |
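The render()/generate() pair above is the user-visible change in this file: render() folds everything into one byte string, while generate() keeps the chunked stream. A small usage sketch with a made-up preloaded fragment:

    from mercurial import templater

    t = templater.templater(cache={'greeting': 'hello, {user}\n'})
    t.render('greeting', {'user': 'alice'})        # -> 'hello, alice\n'
    for chunk in t.generate('greeting', {'user': 'alice'}):
        pass   # byte chunks, sized by minchunk/maxchunk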
|
793 | 793 | |
|
794 | 794 | def templatepaths(): |
|
795 | 795 | '''return locations used for template files.''' |
|
796 | 796 | pathsrel = ['templates'] |
|
797 | 797 | paths = [os.path.normpath(os.path.join(util.datapath, f)) |
|
798 | 798 | for f in pathsrel] |
|
799 | 799 | return [p for p in paths if os.path.isdir(p)] |
|
800 | 800 | |
|
801 | 801 | def templatepath(name): |
|
802 | 802 | '''return location of template file. returns None if not found.''' |
|
803 | 803 | for p in templatepaths(): |
|
804 | 804 | f = os.path.join(p, name) |
|
805 | 805 | if os.path.exists(f): |
|
806 | 806 | return f |
|
807 | 807 | return None |
|
808 | 808 | |
|
809 | 809 | def stylemap(styles, paths=None): |
|
810 | 810 | """Return path to mapfile for a given style. |
|
811 | 811 | |
|
812 | 812 | Searches mapfile in the following locations: |
|
813 | 813 | 1. templatepath/style/map |
|
814 | 814 | 2. templatepath/map-style |
|
815 | 815 | 3. templatepath/map |
|
816 | 816 | """ |
|
817 | 817 | |
|
818 | 818 | if paths is None: |
|
819 | 819 | paths = templatepaths() |
|
820 | 820 | elif isinstance(paths, bytes): |
|
821 | 821 | paths = [paths] |
|
822 | 822 | |
|
823 | 823 | if isinstance(styles, bytes): |
|
824 | 824 | styles = [styles] |
|
825 | 825 | |
|
826 | 826 | for style in styles: |
|
827 | 827 | # only plain name is allowed to honor template paths |
|
828 | 828 | if (not style |
|
829 | 829 | or style in (pycompat.oscurdir, pycompat.ospardir) |
|
830 | 830 | or pycompat.ossep in style |
|
831 | 831 | or pycompat.osaltsep and pycompat.osaltsep in style): |
|
832 | 832 | continue |
|
833 | 833 | locations = [os.path.join(style, 'map'), 'map-' + style] |
|
834 | 834 | locations.append('map') |
|
835 | 835 | |
|
836 | 836 | for path in paths: |
|
837 | 837 | for location in locations: |
|
838 | 838 | mapfile = os.path.join(path, location) |
|
839 | 839 | if os.path.isfile(mapfile): |
|
840 | 840 | return style, mapfile |
|
841 | 841 | |
|
842 | 842 | raise RuntimeError("No hgweb templates found in %r" % paths) |
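Putting the pieces of this file together, hgweb resolves a style name to its map file with stylemap() and then builds a templater from it. A rough sketch, assuming the stock 'paper' style ships with the installation:

    from mercurial import templater

    style, mapfile = templater.stylemap(['paper'])
    t = templater.templater.frommapfile(mapfile)
    # t.map now holds the style's named templates (e.g. 'changeset')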
@@ -1,449 +1,448 b'' | |||
|
1 | 1 | # templateutil.py - utility for template evaluation |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | import types |
|
11 | 11 | |
|
12 | 12 | from .i18n import _ |
|
13 | 13 | from . import ( |
|
14 | 14 | error, |
|
15 | 15 | pycompat, |
|
16 | 16 | util, |
|
17 | 17 | ) |
|
18 | 18 | |
|
19 | 19 | class ResourceUnavailable(error.Abort): |
|
20 | 20 | pass |
|
21 | 21 | |
|
22 | 22 | class TemplateNotFound(error.Abort): |
|
23 | 23 | pass |
|
24 | 24 | |
|
25 | 25 | class hybrid(object): |
|
26 | 26 | """Wrapper for list or dict to support legacy template |
|
27 | 27 | |
|
28 | 28 | This class allows us to handle both: |
|
29 | 29 | - "{files}" (legacy command-line-specific list hack) and |
|
30 | 30 | - "{files % '{file}\n'}" (hgweb-style with inlining and function support) |
|
31 | 31 | and to access raw values: |
|
32 | 32 | - "{ifcontains(file, files, ...)}", "{ifcontains(key, extras, ...)}" |
|
33 | 33 | - "{get(extras, key)}" |
|
34 | 34 | - "{files|json}" |
|
35 | 35 | """ |
|
36 | 36 | |
|
37 | 37 | def __init__(self, gen, values, makemap, joinfmt, keytype=None): |
|
38 | 38 | if gen is not None: |
|
39 | 39 | self.gen = gen # generator or function returning generator |
|
40 | 40 | self._values = values |
|
41 | 41 | self._makemap = makemap |
|
42 | 42 | self.joinfmt = joinfmt |
|
43 | 43 | self.keytype = keytype # hint for 'x in y' where type(x) is unresolved |
|
44 | 44 | def gen(self): |
|
45 | 45 | """Default generator to stringify this as {join(self, ' ')}""" |
|
46 | 46 | for i, x in enumerate(self._values): |
|
47 | 47 | if i > 0: |
|
48 | 48 | yield ' ' |
|
49 | 49 | yield self.joinfmt(x) |
|
50 | 50 | def itermaps(self): |
|
51 | 51 | makemap = self._makemap |
|
52 | 52 | for x in self._values: |
|
53 | 53 | yield makemap(x) |
|
54 | 54 | def __contains__(self, x): |
|
55 | 55 | return x in self._values |
|
56 | 56 | def __getitem__(self, key): |
|
57 | 57 | return self._values[key] |
|
58 | 58 | def __len__(self): |
|
59 | 59 | return len(self._values) |
|
60 | 60 | def __iter__(self): |
|
61 | 61 | return iter(self._values) |
|
62 | 62 | def __getattr__(self, name): |
|
63 | 63 | if name not in (r'get', r'items', r'iteritems', r'iterkeys', |
|
64 | 64 | r'itervalues', r'keys', r'values'): |
|
65 | 65 | raise AttributeError(name) |
|
66 | 66 | return getattr(self._values, name) |
|
67 | 67 | |
|
68 | 68 | class mappable(object): |
|
69 | 69 | """Wrapper for non-list/dict object to support map operation |
|
70 | 70 | |
|
71 | 71 | This class allows us to handle both: |
|
72 | 72 | - "{manifest}" |
|
73 | 73 | - "{manifest % '{rev}:{node}'}" |
|
74 | 74 | - "{manifest.rev}" |
|
75 | 75 | |
|
76 | 76 | Unlike a hybrid, this does not simulate the behavior of the underling |
|
77 | 77 | value. Use unwrapvalue() or unwraphybrid() to obtain the inner object. |
|
78 | 78 | """ |
|
79 | 79 | |
|
80 | 80 | def __init__(self, gen, key, value, makemap): |
|
81 | 81 | if gen is not None: |
|
82 | 82 | self.gen = gen # generator or function returning generator |
|
83 | 83 | self._key = key |
|
84 | 84 | self._value = value # may be generator of strings |
|
85 | 85 | self._makemap = makemap |
|
86 | 86 | |
|
87 | 87 | def gen(self): |
|
88 | 88 | yield pycompat.bytestr(self._value) |
|
89 | 89 | |
|
90 | 90 | def tomap(self): |
|
91 | 91 | return self._makemap(self._key) |
|
92 | 92 | |
|
93 | 93 | def itermaps(self): |
|
94 | 94 | yield self.tomap() |
|
95 | 95 | |
|
96 | 96 | def hybriddict(data, key='key', value='value', fmt=None, gen=None): |
|
97 | 97 | """Wrap data to support both dict-like and string-like operations""" |
|
98 | 98 | prefmt = pycompat.identity |
|
99 | 99 | if fmt is None: |
|
100 | 100 | fmt = '%s=%s' |
|
101 | 101 | prefmt = pycompat.bytestr |
|
102 | 102 | return hybrid(gen, data, lambda k: {key: k, value: data[k]}, |
|
103 | 103 | lambda k: fmt % (prefmt(k), prefmt(data[k]))) |
|
104 | 104 | |
|
105 | 105 | def hybridlist(data, name, fmt=None, gen=None): |
|
106 | 106 | """Wrap data to support both list-like and string-like operations""" |
|
107 | 107 | prefmt = pycompat.identity |
|
108 | 108 | if fmt is None: |
|
109 | 109 | fmt = '%s' |
|
110 | 110 | prefmt = pycompat.bytestr |
|
111 | 111 | return hybrid(gen, data, lambda x: {name: x}, lambda x: fmt % prefmt(x)) |
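To make the hybrid behavior concrete, a small sketch of hybridlist(): the same object stringifies like a joined list, yields per-item mappings for the % operator, and keeps the raw values reachable (the file names are illustrative):

    from mercurial import templateutil

    files = templateutil.hybridlist(['a.txt', 'b.txt'], name='file')
    templateutil.stringify(files)   # -> 'a.txt b.txt' (default join with ' ')
    list(files.itermaps())          # -> [{'file': 'a.txt'}, {'file': 'b.txt'}]
    'a.txt' in files                # -> True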
|
112 | 112 | |
|
113 | 113 | def unwraphybrid(thing): |
|
114 | 114 | """Return an object which can be stringified possibly by using a legacy |
|
115 | 115 | template""" |
|
116 | 116 | gen = getattr(thing, 'gen', None) |
|
117 | 117 | if gen is None: |
|
118 | 118 | return thing |
|
119 | 119 | if callable(gen): |
|
120 | 120 | return gen() |
|
121 | 121 | return gen |
|
122 | 122 | |
|
123 | 123 | def unwrapvalue(thing): |
|
124 | 124 | """Move the inner value object out of the wrapper""" |
|
125 | 125 | if not util.safehasattr(thing, '_value'): |
|
126 | 126 | return thing |
|
127 | 127 | return thing._value |
|
128 | 128 | |
|
129 | 129 | def wraphybridvalue(container, key, value): |
|
130 | 130 | """Wrap an element of hybrid container to be mappable |
|
131 | 131 | |
|
132 | 132 | The key is passed to the makemap function of the given container, which |
|
133 | 133 | should be an item generated by iter(container). |
|
134 | 134 | """ |
|
135 | 135 | makemap = getattr(container, '_makemap', None) |
|
136 | 136 | if makemap is None: |
|
137 | 137 | return value |
|
138 | 138 | if util.safehasattr(value, '_makemap'): |
|
139 | 139 | # a nested hybrid list/dict, which has its own way of map operation |
|
140 | 140 | return value |
|
141 | 141 | return mappable(None, key, value, makemap) |
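A short sketch of wraphybridvalue(): one element of a hybrid dict wrapped as a mappable, so it supports tomap() (and hence % expansion) while still stringifying to its plain value (keys and values are made up):

    from mercurial import templateutil

    extras = templateutil.hybriddict({'branch': 'default'})
    item = templateutil.wraphybridvalue(extras, 'branch', extras['branch'])
    item.tomap()                    # -> {'key': 'branch', 'value': 'default'}
    templateutil.stringify(item)    # -> 'default'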
|
142 | 142 | |
|
143 | 143 | def compatdict(context, mapping, name, data, key='key', value='value', |
|
144 | 144 | fmt=None, plural=None, separator=' '): |
|
145 | 145 | """Wrap data like hybriddict(), but also supports old-style list template |
|
146 | 146 | |
|
147 | 147 | This exists for backward compatibility with the old-style template. Use |
|
148 | 148 | hybriddict() for new template keywords. |
|
149 | 149 | """ |
|
150 | 150 | c = [{key: k, value: v} for k, v in data.iteritems()] |
|
151 | 151 | t = context.resource(mapping, 'templ') |
|
152 | 152 | f = _showlist(name, c, t, mapping, plural, separator) |
|
153 | 153 | return hybriddict(data, key=key, value=value, fmt=fmt, gen=f) |
|
154 | 154 | |
|
155 | 155 | def compatlist(context, mapping, name, data, element=None, fmt=None, |
|
156 | 156 | plural=None, separator=' '): |
|
157 | 157 | """Wrap data like hybridlist(), but also supports old-style list template |
|
158 | 158 | |
|
159 | 159 | This exists for backward compatibility with the old-style template. Use |
|
160 | 160 | hybridlist() for new template keywords. |
|
161 | 161 | """ |
|
162 | 162 | t = context.resource(mapping, 'templ') |
|
163 | 163 | f = _showlist(name, data, t, mapping, plural, separator) |
|
164 | 164 | return hybridlist(data, name=element or name, fmt=fmt, gen=f) |
|
165 | 165 | |
|
166 | 166 | def _showlist(name, values, templ, mapping, plural=None, separator=' '): |
|
167 | 167 | '''expand set of values. |
|
168 | 168 | name is name of key in template map. |
|
169 | 169 | values is list of strings or dicts. |
|
170 | 170 | plural is plural of name, if not simply name + 's'. |
|
171 | 171 | separator is used to join values as a string |
|
172 | 172 | |
|
173 | 173 | expansion works like this, given name 'foo'. |
|
174 | 174 | |
|
175 | 175 | if values is empty, expand 'no_foos'. |
|
176 | 176 | |
|
177 | 177 | if 'foo' not in template map, return values as a string, |
|
178 | 178 | joined by 'separator'. |
|
179 | 179 | |
|
180 | 180 | expand 'start_foos'. |
|
181 | 181 | |
|
182 | 182 | for each value, expand 'foo'. if 'last_foo' in template |
|
183 | 183 | map, expand it instead of 'foo' for last key. |
|
184 | 184 | |
|
185 | 185 | expand 'end_foos'. |
|
186 | 186 | ''' |
|
187 | strmapping = pycompat.strkwargs(mapping) | |
|
188 | 187 | if not plural: |
|
189 | 188 | plural = name + 's' |
|
190 | 189 | if not values: |
|
191 | 190 | noname = 'no_' + plural |
|
192 | 191 | if noname in templ: |
|
193 | yield templ(noname, **strmapping) | |

192 | yield templ.generate(noname, mapping) | |
|
194 | 193 | return |
|
195 | 194 | if name not in templ: |
|
196 | 195 | if isinstance(values[0], bytes): |
|
197 | 196 | yield separator.join(values) |
|
198 | 197 | else: |
|
199 | 198 | for v in values: |
|
200 | 199 | r = dict(v) |
|
201 | 200 | r.update(mapping) |
|
202 | 201 | yield r |
|
203 | 202 | return |
|
204 | 203 | startname = 'start_' + plural |
|
205 | 204 | if startname in templ: |
|
206 | yield templ(startname, **strmapping) | |

205 | yield templ.generate(startname, mapping) | |
|
207 | 206 | vmapping = mapping.copy() |
|
208 | 207 | def one(v, tag=name): |
|
209 | 208 | try: |
|
210 | 209 | vmapping.update(v) |
|
211 | 210 | # Python 2 raises ValueError if the type of v is wrong. Python |
|
212 | 211 | # 3 raises TypeError. |
|
213 | 212 | except (AttributeError, TypeError, ValueError): |
|
214 | 213 | try: |
|
215 | 214 | # Python 2 raises ValueError trying to destructure an e.g. |
|
216 | 215 | # bytes. Python 3 raises TypeError. |
|
217 | 216 | for a, b in v: |
|
218 | 217 | vmapping[a] = b |
|
219 | 218 | except (TypeError, ValueError): |
|
220 | 219 | vmapping[name] = v |
|
221 | return templ(tag, **pycompat.strkwargs(vmapping)) | |

220 | return templ.generate(tag, vmapping) | |
|
222 | 221 | lastname = 'last_' + name |
|
223 | 222 | if lastname in templ: |
|
224 | 223 | last = values.pop() |
|
225 | 224 | else: |
|
226 | 225 | last = None |
|
227 | 226 | for v in values: |
|
228 | 227 | yield one(v) |
|
229 | 228 | if last is not None: |
|
230 | 229 | yield one(last, tag=lastname) |
|
231 | 230 | endname = 'end_' + plural |
|
232 | 231 | if endname in templ: |
|
233 | yield templ(endname, **strmapping) | |

232 | yield templ.generate(endname, mapping) | |
|
234 | 233 | |
|
235 | 234 | def stringify(thing): |
|
236 | 235 | """Turn values into bytes by converting into text and concatenating them""" |
|
237 | 236 | thing = unwraphybrid(thing) |
|
238 | 237 | if util.safehasattr(thing, '__iter__') and not isinstance(thing, bytes): |
|
239 | 238 | if isinstance(thing, str): |
|
240 | 239 | # This is only reachable on Python 3 (otherwise |
|
241 | 240 | # isinstance(thing, bytes) would have been true), and is |
|
242 | 241 | # here to prevent infinite recursion bugs on Python 3. |
|
243 | 242 | raise error.ProgrammingError( |
|
244 | 243 | 'stringify got unexpected unicode string: %r' % thing) |
|
245 | 244 | return "".join([stringify(t) for t in thing if t is not None]) |
|
246 | 245 | if thing is None: |
|
247 | 246 | return "" |
|
248 | 247 | return pycompat.bytestr(thing) |
|
249 | 248 | |
|
250 | 249 | def findsymbolicname(arg): |
|
251 | 250 | """Find symbolic name for the given compiled expression; returns None |
|
252 | 251 | if nothing found reliably""" |
|
253 | 252 | while True: |
|
254 | 253 | func, data = arg |
|
255 | 254 | if func is runsymbol: |
|
256 | 255 | return data |
|
257 | 256 | elif func is runfilter: |
|
258 | 257 | arg = data[0] |
|
259 | 258 | else: |
|
260 | 259 | return None |
|
261 | 260 | |
|
262 | 261 | def evalrawexp(context, mapping, arg): |
|
263 | 262 | """Evaluate given argument as a bare template object which may require |
|
264 | 263 | further processing (such as folding generator of strings)""" |
|
265 | 264 | func, data = arg |
|
266 | 265 | return func(context, mapping, data) |
|
267 | 266 | |
|
268 | 267 | def evalfuncarg(context, mapping, arg): |
|
269 | 268 | """Evaluate given argument as value type""" |
|
270 | 269 | thing = evalrawexp(context, mapping, arg) |
|
271 | 270 | thing = unwrapvalue(thing) |
|
272 | 271 | # evalrawexp() may return string, generator of strings or arbitrary object |
|
273 | 272 | # such as date tuple, but filter does not want generator. |
|
274 | 273 | if isinstance(thing, types.GeneratorType): |
|
275 | 274 | thing = stringify(thing) |
|
276 | 275 | return thing |
|
277 | 276 | |
|
278 | 277 | def evalboolean(context, mapping, arg): |
|
279 | 278 | """Evaluate given argument as boolean, but also takes boolean literals""" |
|
280 | 279 | func, data = arg |
|
281 | 280 | if func is runsymbol: |
|
282 | 281 | thing = func(context, mapping, data, default=None) |
|
283 | 282 | if thing is None: |
|
284 | 283 | # not a template keyword, takes as a boolean literal |
|
285 | 284 | thing = util.parsebool(data) |
|
286 | 285 | else: |
|
287 | 286 | thing = func(context, mapping, data) |
|
288 | 287 | thing = unwrapvalue(thing) |
|
289 | 288 | if isinstance(thing, bool): |
|
290 | 289 | return thing |
|
291 | 290 | # other objects are evaluated as strings, which means 0 is True, but |
|
292 | 291 | # empty dict/list should be False as they are expected to be '' |
|
293 | 292 | return bool(stringify(thing)) |
|
294 | 293 | |
|
295 | 294 | def evalinteger(context, mapping, arg, err=None): |
|
296 | 295 | v = evalfuncarg(context, mapping, arg) |
|
297 | 296 | try: |
|
298 | 297 | return int(v) |
|
299 | 298 | except (TypeError, ValueError): |
|
300 | 299 | raise error.ParseError(err or _('not an integer')) |
|
301 | 300 | |
|
302 | 301 | def evalstring(context, mapping, arg): |
|
303 | 302 | return stringify(evalrawexp(context, mapping, arg)) |
|
304 | 303 | |
|
305 | 304 | def evalstringliteral(context, mapping, arg): |
|
306 | 305 | """Evaluate given argument as string template, but returns symbol name |
|
307 | 306 | if it is unknown""" |
|
308 | 307 | func, data = arg |
|
309 | 308 | if func is runsymbol: |
|
310 | 309 | thing = func(context, mapping, data, default=data) |
|
311 | 310 | else: |
|
312 | 311 | thing = func(context, mapping, data) |
|
313 | 312 | return stringify(thing) |
|
314 | 313 | |
|
315 | 314 | _evalfuncbytype = { |
|
316 | 315 | bool: evalboolean, |
|
317 | 316 | bytes: evalstring, |
|
318 | 317 | int: evalinteger, |
|
319 | 318 | } |
|
320 | 319 | |
|
321 | 320 | def evalastype(context, mapping, arg, typ): |
|
322 | 321 | """Evaluate given argument and coerce its type""" |
|
323 | 322 | try: |
|
324 | 323 | f = _evalfuncbytype[typ] |
|
325 | 324 | except KeyError: |
|
326 | 325 | raise error.ProgrammingError('invalid type specified: %r' % typ) |
|
327 | 326 | return f(context, mapping, arg) |
|
328 | 327 | |
|
329 | 328 | def runinteger(context, mapping, data): |
|
330 | 329 | return int(data) |
|
331 | 330 | |
|
332 | 331 | def runstring(context, mapping, data): |
|
333 | 332 | return data |
|
334 | 333 | |
|
335 | 334 | def _recursivesymbolblocker(key): |
|
336 | 335 | def showrecursion(**args): |
|
337 | 336 | raise error.Abort(_("recursive reference '%s' in template") % key) |
|
338 | 337 | return showrecursion |
|
339 | 338 | |
|
340 | 339 | def runsymbol(context, mapping, key, default=''): |
|
341 | 340 | v = context.symbol(mapping, key) |
|
342 | 341 | if v is None: |
|
343 | 342 | # put poison to cut recursion. we can't move this to parsing phase |
|
344 | 343 | # because "x = {x}" is allowed if "x" is a keyword. (issue4758) |
|
345 | 344 | safemapping = mapping.copy() |
|
346 | 345 | safemapping[key] = _recursivesymbolblocker(key) |
|
347 | 346 | try: |
|
348 | 347 | v = context.process(key, safemapping) |
|
349 | 348 | except TemplateNotFound: |
|
350 | 349 | v = default |
|
351 | 350 | if callable(v) and getattr(v, '_requires', None) is None: |
|
352 | 351 | # old templatekw: expand all keywords and resources |
|
353 | 352 | props = {k: f(context, mapping, k) |
|
354 | 353 | for k, f in context._resources.items()} |
|
355 | 354 | props.update(mapping) |
|
356 | 355 | return v(**pycompat.strkwargs(props)) |
|
357 | 356 | if callable(v): |
|
358 | 357 | # new templatekw |
|
359 | 358 | try: |
|
360 | 359 | return v(context, mapping) |
|
361 | 360 | except ResourceUnavailable: |
|
362 | 361 | # unsupported keyword is mapped to empty just like unknown keyword |
|
363 | 362 | return None |
|
364 | 363 | return v |
|
365 | 364 | |
|
366 | 365 | def runtemplate(context, mapping, template): |
|
367 | 366 | for arg in template: |
|
368 | 367 | yield evalrawexp(context, mapping, arg) |
|
369 | 368 | |
|
370 | 369 | def runfilter(context, mapping, data): |
|
371 | 370 | arg, filt = data |
|
372 | 371 | thing = evalfuncarg(context, mapping, arg) |
|
373 | 372 | try: |
|
374 | 373 | return filt(thing) |
|
375 | 374 | except (ValueError, AttributeError, TypeError): |
|
376 | 375 | sym = findsymbolicname(arg) |
|
377 | 376 | if sym: |
|
378 | 377 | msg = (_("template filter '%s' is not compatible with keyword '%s'") |
|
379 | 378 | % (pycompat.sysbytes(filt.__name__), sym)) |
|
380 | 379 | else: |
|
381 | 380 | msg = (_("incompatible use of template filter '%s'") |
|
382 | 381 | % pycompat.sysbytes(filt.__name__)) |
|
383 | 382 | raise error.Abort(msg) |
|
384 | 383 | |
|
385 | 384 | def runmap(context, mapping, data): |
|
386 | 385 | darg, targ = data |
|
387 | 386 | d = evalrawexp(context, mapping, darg) |
|
388 | 387 | if util.safehasattr(d, 'itermaps'): |
|
389 | 388 | diter = d.itermaps() |
|
390 | 389 | else: |
|
391 | 390 | try: |
|
392 | 391 | diter = iter(d) |
|
393 | 392 | except TypeError: |
|
394 | 393 | sym = findsymbolicname(darg) |
|
395 | 394 | if sym: |
|
396 | 395 | raise error.ParseError(_("keyword '%s' is not iterable") % sym) |
|
397 | 396 | else: |
|
398 | 397 | raise error.ParseError(_("%r is not iterable") % d) |
|
399 | 398 | |
|
400 | 399 | for i, v in enumerate(diter): |
|
401 | 400 | lm = mapping.copy() |
|
402 | 401 | lm['index'] = i |
|
403 | 402 | if isinstance(v, dict): |
|
404 | 403 | lm.update(v) |
|
405 | 404 | lm['originalnode'] = mapping.get('node') |
|
406 | 405 | yield evalrawexp(context, lm, targ) |
|
407 | 406 | else: |
|
408 | 407 | # v is not an iterable of dicts, this happen when 'key' |
|
409 | 408 | # has been fully expanded already and format is useless. |
|
410 | 409 | # If so, return the expanded value. |
|
411 | 410 | yield v |
|
412 | 411 | |
|
413 | 412 | def runmember(context, mapping, data): |
|
414 | 413 | darg, memb = data |
|
415 | 414 | d = evalrawexp(context, mapping, darg) |
|
416 | 415 | if util.safehasattr(d, 'tomap'): |
|
417 | 416 | lm = mapping.copy() |
|
418 | 417 | lm.update(d.tomap()) |
|
419 | 418 | return runsymbol(context, lm, memb) |
|
420 | 419 | if util.safehasattr(d, 'get'): |
|
421 | 420 | return getdictitem(d, memb) |
|
422 | 421 | |
|
423 | 422 | sym = findsymbolicname(darg) |
|
424 | 423 | if sym: |
|
425 | 424 | raise error.ParseError(_("keyword '%s' has no member") % sym) |
|
426 | 425 | else: |
|
427 | 426 | raise error.ParseError(_("%r has no member") % pycompat.bytestr(d)) |
|
428 | 427 | |
|
429 | 428 | def runnegate(context, mapping, data): |
|
430 | 429 | data = evalinteger(context, mapping, data, |
|
431 | 430 | _('negation needs an integer argument')) |
|
432 | 431 | return -data |
|
433 | 432 | |
|
434 | 433 | def runarithmetic(context, mapping, data): |
|
435 | 434 | func, left, right = data |
|
436 | 435 | left = evalinteger(context, mapping, left, |
|
437 | 436 | _('arithmetic only defined on integers')) |
|
438 | 437 | right = evalinteger(context, mapping, right, |
|
439 | 438 | _('arithmetic only defined on integers')) |
|
440 | 439 | try: |
|
441 | 440 | return func(left, right) |
|
442 | 441 | except ZeroDivisionError: |
|
443 | 442 | raise error.Abort(_('division by zero is not defined')) |
|
444 | 443 | |
|
445 | 444 | def getdictitem(dictarg, key): |
|
446 | 445 | val = dictarg.get(key) |
|
447 | 446 | if val is None: |
|
448 | 447 | return |
|
449 | 448 | return wraphybridvalue(dictarg, key, val) |