@@ -1,453 +1,454 @@
# hgweb/hgweb_mod.py - Web interface for a repository.
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import contextlib
import os

from .common import (
    ErrorResponse,
    HTTP_BAD_REQUEST,
    cspvalues,
    permhooks,
    statusmessage,
)

from .. import (
    encoding,
    error,
    formatter,
    hg,
    hook,
    profiling,
    pycompat,
    repoview,
    templatefilters,
    templater,
    ui as uimod,
    util,
    wireprotoserver,
)

from . import (
    request as requestmod,
    webcommands,
    webutil,
    wsgicgi,
)

archivespecs = util.sortdict((
    ('zip', ('application/zip', 'zip', '.zip', None)),
    ('gz', ('application/x-gzip', 'tgz', '.tar.gz', None)),
    ('bz2', ('application/x-bzip2', 'tbz2', '.tar.bz2', None)),
))

def getstyle(req, configfn, templatepath):
    styles = (
        req.qsparams.get('style', None),
        configfn('web', 'style'),
        'paper',
    )
    return styles, templater.stylemap(styles, templatepath)
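
Reviewer note (not part of the patch): getstyle resolves the effective style in
priority order -- the ?style= query parameter first, then the web.style config
value, then the built-in 'paper' default. Either of the following, with purely
illustrative values, selects the gitweb theme:

    # per request:
    https://hg.example.org/repo/shortlog?style=gitweb

    # per repository or globally, in an hgrc:
    [web]
    style = gitweb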

def makebreadcrumb(url, prefix=''):
    '''Return a 'URL breadcrumb' list

    A 'URL breadcrumb' is a list of URL-name pairs,
    corresponding to each of the path items on a URL.
    This can be used to create path navigation entries.
    '''
    if url.endswith('/'):
        url = url[:-1]
    if prefix:
        url = '/' + prefix + url
    relpath = url
    if relpath.startswith('/'):
        relpath = relpath[1:]

    breadcrumb = []
    urlel = url
    pathitems = [''] + relpath.split('/')
    for pathel in reversed(pathitems):
        if not pathel or not urlel:
            break
        breadcrumb.append({'url': urlel, 'name': pathel})
        urlel = os.path.dirname(urlel)
    return reversed(breadcrumb)
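
As a reviewer aid only (the path is hypothetical and the literals are shown as
native strings, i.e. bytes on Python 2 where this module runs), makebreadcrumb
yields one URL-name pair per path component, outermost first:

    >>> list(makebreadcrumb('/hg/projects/alpha'))
    [{'url': '/hg', 'name': 'hg'},
     {'url': '/hg/projects', 'name': 'projects'},
     {'url': '/hg/projects/alpha', 'name': 'alpha'}]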

class requestcontext(object):
    """Holds state/context for an individual request.

    Servers can be multi-threaded. Holding state on the WSGI application
    is prone to race conditions. Instances of this class exist to hold
    mutable and race-free state for requests.
    """
    def __init__(self, app, repo, req, res):
        self.repo = repo
        self.reponame = app.reponame
        self.req = req
        self.res = res

        self.archivespecs = archivespecs

        self.maxchanges = self.configint('web', 'maxchanges')
        self.stripecount = self.configint('web', 'stripes')
        self.maxshortchanges = self.configint('web', 'maxshortchanges')
        self.maxfiles = self.configint('web', 'maxfiles')
        self.allowpull = self.configbool('web', 'allow-pull')

        # we use untrusted=False to prevent a repo owner from using
        # web.templates in .hg/hgrc to get access to any file readable
        # by the user running the CGI script
        self.templatepath = self.config('web', 'templates', untrusted=False)

        # This object is more expensive to build than simple config values.
        # It is shared across requests. The app will replace the object
        # if it is updated. Since this is a reference and nothing should
        # modify the underlying object, it should be constant for the lifetime
        # of the request.
        self.websubtable = app.websubtable

        self.csp, self.nonce = cspvalues(self.repo.ui)

    # Trust the settings from the .hg/hgrc files by default.
    def config(self, section, name, default=uimod._unset, untrusted=True):
        return self.repo.ui.config(section, name, default,
                                   untrusted=untrusted)

    def configbool(self, section, name, default=uimod._unset, untrusted=True):
        return self.repo.ui.configbool(section, name, default,
                                       untrusted=untrusted)

    def configint(self, section, name, default=uimod._unset, untrusted=True):
        return self.repo.ui.configint(section, name, default,
                                      untrusted=untrusted)

    def configlist(self, section, name, default=uimod._unset, untrusted=True):
        return self.repo.ui.configlist(section, name, default,
                                       untrusted=untrusted)

    def archivelist(self, nodeid):
        allowed = self.configlist('web', 'allow_archive')
        for typ, spec in self.archivespecs.iteritems():
            if typ in allowed or self.configbool('web', 'allow%s' % typ):
                yield {'type': typ, 'extension': spec[2], 'node': nodeid}
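
Not part of the change, but for reviewers unfamiliar with the knobs archivelist
consults: either spelling below enables archive links, and the values shown are
only examples.

    [web]
    # list the allowed formats explicitly...
    allow_archive = gz zip bz2
    # ...or use the older per-format booleans checked by 'allow%s'
    allowzip = True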

    def templater(self, req):
        # determine scheme, port and server name
        # this is needed to create absolute urls
        logourl = self.config('web', 'logourl')
        logoimg = self.config('web', 'logoimg')
        staticurl = (self.config('web', 'staticurl')
                     or req.apppath + '/static/')
        if not staticurl.endswith('/'):
            staticurl += '/'

        # some functions for the templater

        def motd(**map):
            yield self.config('web', 'motd')

        # figure out which style to use

        vars = {}
        styles, (style, mapfile) = getstyle(req, self.config,
                                            self.templatepath)
        if style == styles[0]:
            vars['style'] = style

        sessionvars = webutil.sessionvars(vars, '?')

        if not self.reponame:
            self.reponame = (self.config('web', 'name', '')
                             or req.reponame
                             or req.apppath
                             or self.repo.root)

        def websubfilter(text):
            return templatefilters.websub(text, self.websubtable)

        # create the templater
        # TODO: export all keywords: defaults = templatekw.keywords.copy()
        defaults = {
            'url': req.apppath + '/',
            'logourl': logourl,
            'logoimg': logoimg,
            'staticurl': staticurl,
            'urlbase': req.advertisedbaseurl,
            'repo': self.reponame,
            'encoding': encoding.encoding,
            'motd': motd,
            'sessionvars': sessionvars,
            'pathdef': makebreadcrumb(req.apppath),
            'style': style,
            'nonce': self.nonce,
        }
        tres = formatter.templateresources(self.repo.ui, self.repo)
        tmpl = templater.templater.frommapfile(mapfile,
                                               filters={'websub': websubfilter},
                                               defaults=defaults,
                                               resources=tres)
        return tmpl

    def sendtemplate(self, name, **kwargs):
        """Helper function to send a response generated from a template."""
-        self.res.setbodygen(self.tmpl(name, **kwargs))
+        kwargs = pycompat.byteskwargs(kwargs)
+        self.res.setbodygen(self.tmpl.generate(name, kwargs))
        return self.res.sendresponse()
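
This is the only functional change in the file. Keyword arguments reach
sendtemplate with native str keys (unicode on Python 3), while the templater
operates on bytes, so the keys are converted with pycompat.byteskwargs and the
resulting mapping is passed to tmpl.generate() rather than being re-expanded as
**kwargs. A rough sketch of what the conversion amounts to, assuming ASCII
keyword names; this is an approximation, not Mercurial's actual implementation:

    def byteskwargs(dic):
        # On Python 3, **kwargs keys arrive as unicode str; turn them back
        # into the bytes keys the templater expects. On Python 2, native str
        # is already bytes, so this is effectively a no-op.
        return {k if isinstance(k, bytes) else k.encode('ascii'): v
                for k, v in dic.items()}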

class hgweb(object):
    """HTTP server for individual repositories.

    Instances of this class serve HTTP responses for a particular
    repository.

    Instances are typically used as WSGI applications.

    Some servers are multi-threaded. On these servers, there may
    be multiple active threads inside __call__.
    """
    def __init__(self, repo, name=None, baseui=None):
        if isinstance(repo, str):
            if baseui:
                u = baseui.copy()
            else:
                u = uimod.ui.load()
            r = hg.repository(u, repo)
        else:
            # we trust caller to give us a private copy
            r = repo

        r.ui.setconfig('ui', 'report_untrusted', 'off', 'hgweb')
        r.baseui.setconfig('ui', 'report_untrusted', 'off', 'hgweb')
        r.ui.setconfig('ui', 'nontty', 'true', 'hgweb')
        r.baseui.setconfig('ui', 'nontty', 'true', 'hgweb')
        # resolve file patterns relative to repo root
        r.ui.setconfig('ui', 'forcecwd', r.root, 'hgweb')
        r.baseui.setconfig('ui', 'forcecwd', r.root, 'hgweb')
        # displaying bundling progress bar while serving feels wrong and may
        # break some wsgi implementations.
        r.ui.setconfig('progress', 'disable', 'true', 'hgweb')
        r.baseui.setconfig('progress', 'disable', 'true', 'hgweb')
        self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))]
        self._lastrepo = self._repos[0]
        hook.redirect(True)
        self.reponame = name

    def _webifyrepo(self, repo):
        repo = getwebview(repo)
        self.websubtable = webutil.getwebsubs(repo)
        return repo

    @contextlib.contextmanager
    def _obtainrepo(self):
        """Obtain a repo unique to the caller.

        Internally we maintain a stack of cachedlocalrepo instances
        to be handed out. If one is available, we pop it and return it,
        ensuring it is up to date in the process. If one is not available,
        we clone the most recently used repo instance and return it.

        It is currently possible for the stack to grow without bounds
        if the server allows infinite threads. However, servers should
        have a thread limit, thus establishing our limit.
        """
        if self._repos:
            cached = self._repos.pop()
            r, created = cached.fetch()
        else:
            cached = self._lastrepo.copy()
            r, created = cached.fetch()
        if created:
            r = self._webifyrepo(r)

        self._lastrepo = cached
        self.mtime = cached.mtime
        try:
            yield r
        finally:
            self._repos.append(cached)

    def run(self):
        """Start a server from CGI environment.

        Modern servers should be using WSGI and should avoid this
        method, if possible.
        """
        if not encoding.environ.get('GATEWAY_INTERFACE',
                                    '').startswith("CGI/1."):
            raise RuntimeError("This function is only intended to be "
                               "called while running as a CGI script.")
        wsgicgi.launch(self)

    def __call__(self, env, respond):
        """Run the WSGI application.

        This may be called by multiple threads.
        """
        req = requestmod.parserequestfromenv(env)
        res = requestmod.wsgiresponse(req, respond)

        return self.run_wsgi(req, res)

    def run_wsgi(self, req, res):
        """Internal method to run the WSGI application.

        This is typically only called by Mercurial. External consumers
        should be using instances of this class as the WSGI application.
        """
        with self._obtainrepo() as repo:
            profile = repo.ui.configbool('profiling', 'enabled')
            with profiling.profile(repo.ui, enabled=profile):
                for r in self._runwsgi(req, res, repo):
                    yield r

    def _runwsgi(self, req, res, repo):
        rctx = requestcontext(self, repo, req, res)

        # This state is global across all threads.
        encoding.encoding = rctx.config('web', 'encoding')
        rctx.repo.ui.environ = req.rawenv

        if rctx.csp:
            # hgwebdir may have added CSP header. Since we generate our own,
            # replace it.
            res.headers['Content-Security-Policy'] = rctx.csp

        handled = wireprotoserver.handlewsgirequest(
            rctx, req, res, self.check_perm)
        if handled:
            return res.sendresponse()

        # Old implementations of hgweb supported dispatching the request via
        # the initial query string parameter instead of using PATH_INFO.
        # If PATH_INFO is present (signaled by ``req.dispatchpath`` having
        # a value), we use it. Otherwise fall back to the query string.
        if req.dispatchpath is not None:
            query = req.dispatchpath
        else:
            query = req.querystring.partition('&')[0].partition(';')[0]

        # translate user-visible url structure to internal structure

        args = query.split('/', 2)
        if 'cmd' not in req.qsparams and args and args[0]:
            cmd = args.pop(0)
            style = cmd.rfind('-')
            if style != -1:
                req.qsparams['style'] = cmd[:style]
                cmd = cmd[style + 1:]

            # avoid accepting e.g. style parameter as command
            if util.safehasattr(webcommands, cmd):
                req.qsparams['cmd'] = cmd

            if cmd == 'static':
                req.qsparams['file'] = '/'.join(args)
            else:
                if args and args[0]:
                    node = args.pop(0).replace('%2F', '/')
                    req.qsparams['node'] = node
                if args:
                    if 'file' in req.qsparams:
                        del req.qsparams['file']
                    for a in args:
                        req.qsparams.add('file', a)

            ua = req.headers.get('User-Agent', '')
            if cmd == 'rev' and 'mercurial' in ua:
                req.qsparams['style'] = 'raw'

            if cmd == 'archive':
                fn = req.qsparams['node']
                for type_, spec in rctx.archivespecs.iteritems():
                    ext = spec[2]
                    if fn.endswith(ext):
                        req.qsparams['node'] = fn[:-len(ext)]
                        req.qsparams['type'] = type_
        else:
            cmd = req.qsparams.get('cmd', '')

        # process the web interface request

        try:
            rctx.tmpl = rctx.templater(req)
            ctype = rctx.tmpl.render('mimetype',
                                     {'encoding': encoding.encoding})

            # check read permissions for non-static content
            if cmd != 'static':
                self.check_perm(rctx, req, None)

            if cmd == '':
                req.qsparams['cmd'] = rctx.tmpl.cache['default']
                cmd = req.qsparams['cmd']

            # Don't enable caching if using a CSP nonce because then it wouldn't
            # be a nonce.
            if rctx.configbool('web', 'cache') and not rctx.nonce:
                tag = 'W/"%d"' % self.mtime
                if req.headers.get('If-None-Match') == tag:
                    res.status = '304 Not Modified'
                    # Response body not allowed on 304.
                    res.setbodybytes('')
                    return res.sendresponse()

                res.headers['ETag'] = tag

            if cmd not in webcommands.__all__:
                msg = 'no such method: %s' % cmd
                raise ErrorResponse(HTTP_BAD_REQUEST, msg)
            else:
                # Set some globals appropriate for web handlers. Commands can
                # override easily enough.
                res.status = '200 Script output follows'
                res.headers['Content-Type'] = ctype
                return getattr(webcommands, cmd)(rctx)

        except (error.LookupError, error.RepoLookupError) as err:
            msg = pycompat.bytestr(err)
            if (util.safehasattr(err, 'name') and
                not isinstance(err, error.ManifestLookupError)):
                msg = 'revision not found: %s' % err.name

            res.status = '404 Not Found'
            res.headers['Content-Type'] = ctype
            return rctx.sendtemplate('error', error=msg)
        except (error.RepoError, error.RevlogError) as e:
            res.status = '500 Internal Server Error'
            res.headers['Content-Type'] = ctype
            return rctx.sendtemplate('error', error=pycompat.bytestr(e))
        except ErrorResponse as e:
            res.status = statusmessage(e.code, pycompat.bytestr(e))
            res.headers['Content-Type'] = ctype
            return rctx.sendtemplate('error', error=pycompat.bytestr(e))

    def check_perm(self, rctx, req, op):
        for permhook in permhooks:
            permhook(rctx, req, op)

def getwebview(repo):
    """The 'web.view' config controls the changeset filter applied to hgweb.
    Possible values are ``served``, ``visible`` and ``all``. Default is
    ``served``. The ``served`` filter only shows changesets that can be pulled
    from the hgweb instance. The ``visible`` filter includes secret changesets
    but still excludes "hidden" ones.

    See the repoview module for details.

    The option has been around undocumented since Mercurial 2.5, but no
    user ever asked about it. So we better keep it undocumented for now."""
    # experimental config: web.view
    viewconfig = repo.ui.config('web', 'view', untrusted=True)
    if viewconfig == 'all':
        return repo.unfiltered()
    elif viewconfig in repoview.filtertable:
        return repo.filtered(viewconfig)
    else:
        return repo.filtered('served')
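
End of hgweb_mod.py. As the class docstring notes, hgweb instances are normally
mounted as WSGI applications; a typical hgweb.wsgi script looks roughly like
the sketch below, where the repository path and name are placeholders rather
than anything defined by this patch:

    from mercurial import demandimport
    demandimport.enable()
    from mercurial.hgweb import hgweb

    application = hgweb('/srv/hg/myrepo', name='myrepo')

Relatedly, getwebview honours the experimental web.view option; for example,
setting view = all in the [web] section serves the unfiltered repository.
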
@@ -1,536 +1,536 @@
# hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import gc
import os
import time

from ..i18n import _

from .common import (
    ErrorResponse,
    HTTP_SERVER_ERROR,
    cspvalues,
    get_contact,
    get_mtime,
    ismember,
    paritygen,
    staticfile,
    statusmessage,
)

from .. import (
    configitems,
    encoding,
    error,
    hg,
    profiling,
    pycompat,
    scmutil,
    templater,
    ui as uimod,
    util,
)

from . import (
    hgweb_mod,
    request as requestmod,
    webutil,
    wsgicgi,
)
from ..utils import dateutil

def cleannames(items):
    return [(util.pconvert(name).strip('/'), path) for name, path in items]

def findrepos(paths):
    repos = []
    for prefix, root in cleannames(paths):
        roothead, roottail = os.path.split(root)
        # "foo = /bar/*" or "foo = /bar/**" lets every repo /bar/N in or below
        # /bar/ be served as foo/N.
        # '*' will not search inside dirs with .hg (except .hg/patches),
        # '**' will search inside dirs with .hg (and thus also find subrepos).
        try:
            recurse = {'*': False, '**': True}[roottail]
        except KeyError:
            repos.append((prefix, root))
            continue
        roothead = os.path.normpath(os.path.abspath(roothead))
        paths = scmutil.walkrepos(roothead, followsym=True, recurse=recurse)
        repos.extend(urlrepos(prefix, roothead, paths))
    return repos

def urlrepos(prefix, roothead, paths):
    """yield url paths and filesystem paths from a list of repo paths

    >>> conv = lambda seq: [(v, util.pconvert(p)) for v,p in seq]
    >>> conv(urlrepos(b'hg', b'/opt', [b'/opt/r', b'/opt/r/r', b'/opt']))
    [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')]
    >>> conv(urlrepos(b'', b'/opt', [b'/opt/r', b'/opt/r/r', b'/opt']))
    [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')]
    """
    for path in paths:
        path = os.path.normpath(path)
        yield (prefix + '/' +
               util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path
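
For reviewers, this is the configuration shape findrepos and urlrepos handle: a
[paths] section in the hgwebdir config maps URL prefixes to filesystem roots,
with the * and ** globbing described in the comment above. All paths below are
examples only.

    [paths]
    # serve one repository at /projects/alpha
    projects/alpha = /srv/hg/alpha
    # serve every repository directly under /srv/hg as /hg/<name>
    hg = /srv/hg/*
    # also descend into nested directories (and thus find subrepos)
    nested = /srv/hg/**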

def readallowed(ui, req):
    """Check allow_read and deny_read config options of a repo's ui object
    to determine user permissions. By default, with neither option set (or
    both empty), allow all users to read the repo. There are two ways a
    user can be denied read access: (1) deny_read is not empty, and the
    user is unauthenticated or deny_read contains user (or *), and (2)
    allow_read is not empty and the user is not in allow_read. Return True
    if user is allowed to read the repo, else return False."""

    user = req.remoteuser

    deny_read = ui.configlist('web', 'deny_read', untrusted=True)
    if deny_read and (not user or ismember(ui, user, deny_read)):
        return False

    allow_read = ui.configlist('web', 'allow_read', untrusted=True)
    # by default, allow reading if no allow_read option has been set
    if not allow_read or ismember(ui, user, allow_read):
        return True

    return False
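
Again purely illustrative, mirroring the docstring above: deny_read is checked
first and wins, then allow_read restricts access to the listed users, with *
matching everyone. The user names below are made up.

    [web]
    # deny these users (or "*" for everyone, including unauthenticated users)
    deny_read = mallory
    # if set, only these users may read; otherwise anyone not denied may
    allow_read = alice, bob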

def archivelist(ui, nodeid, url):
    allowed = ui.configlist('web', 'allow_archive', untrusted=True)
    archives = []

    for typ, spec in hgweb_mod.archivespecs.iteritems():
        if typ in allowed or ui.configbool('web', 'allow' + typ,
                                           untrusted=True):
            archives.append({
                'type': typ,
                'extension': spec[2],
                'node': nodeid,
                'url': url,
            })

    return archives

def rawindexentries(ui, repos, req, subdir=''):
    descend = ui.configbool('web', 'descend')
    collapse = ui.configbool('web', 'collapse')
    seenrepos = set()
    seendirs = set()
    for name, path in repos:

        if not name.startswith(subdir):
            continue
        name = name[len(subdir):]
        directory = False

        if '/' in name:
            if not descend:
                continue

            nameparts = name.split('/')
            rootname = nameparts[0]

            if not collapse:
                pass
            elif rootname in seendirs:
                continue
            elif rootname in seenrepos:
                pass
            else:
                directory = True
                name = rootname

                # redefine the path to refer to the directory
                discarded = '/'.join(nameparts[1:])

                # remove name parts plus accompanying slash
                path = path[:-len(discarded) - 1]

                try:
                    r = hg.repository(ui, path)
                    directory = False
                except (IOError, error.RepoError):
                    pass

        parts = [
            req.apppath.strip('/'),
            subdir.strip('/'),
            name.strip('/'),
        ]
        url = '/' + '/'.join(p for p in parts if p) + '/'

        # show either a directory entry or a repository
        if directory:
            # get the directory's time information
            try:
                d = (get_mtime(path), dateutil.makedate()[1])
            except OSError:
                continue

            # add '/' to the name to make it obvious that
            # the entry is a directory, not a regular repository
            row = {'contact': "",
                   'contact_sort': "",
                   'name': name + '/',
                   'name_sort': name,
                   'url': url,
                   'description': "",
                   'description_sort': "",
                   'lastchange': d,
                   'lastchange_sort': d[1] - d[0],
                   'archives': [],
                   'isdirectory': True,
                   'labels': [],
                   }

            seendirs.add(name)
            yield row
            continue

        u = ui.copy()
        try:
            u.readconfig(os.path.join(path, '.hg', 'hgrc'))
        except Exception as e:
            u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e))
            continue

        def get(section, name, default=uimod._unset):
            return u.config(section, name, default, untrusted=True)

        if u.configbool("web", "hidden", untrusted=True):
            continue

        if not readallowed(u, req):
            continue

        # update time with local timezone
        try:
            r = hg.repository(ui, path)
        except IOError:
            u.warn(_('error accessing repository at %s\n') % path)
            continue
        except error.RepoError:
            u.warn(_('error accessing repository at %s\n') % path)
            continue
        try:
            d = (get_mtime(r.spath), dateutil.makedate()[1])
        except OSError:
            continue

        contact = get_contact(get)
        description = get("web", "description")
        seenrepos.add(name)
        name = get("web", "name", name)
        row = {'contact': contact or "unknown",
               'contact_sort': contact.upper() or "unknown",
               'name': name,
               'name_sort': name,
               'url': url,
               'description': description or "unknown",
               'description_sort': description.upper() or "unknown",
               'lastchange': d,
               'lastchange_sort': d[1] - d[0],
               'archives': archivelist(u, "tip", url),
               'isdirectory': None,
               'labels': u.configlist('web', 'labels', untrusted=True),
               }

        yield row
248 |
|
248 | |||
249 | def indexentries(ui, repos, req, stripecount, sortcolumn='', |
|
249 | def indexentries(ui, repos, req, stripecount, sortcolumn='', | |
250 | descending=False, subdir=''): |
|
250 | descending=False, subdir=''): | |
251 |
|
251 | |||
252 | rows = rawindexentries(ui, repos, req, subdir=subdir) |
|
252 | rows = rawindexentries(ui, repos, req, subdir=subdir) | |
253 |
|
253 | |||
254 | sortdefault = None, False |
|
254 | sortdefault = None, False | |
255 |
|
255 | |||
256 | if sortcolumn and sortdefault != (sortcolumn, descending): |
|
256 | if sortcolumn and sortdefault != (sortcolumn, descending): | |
257 | sortkey = '%s_sort' % sortcolumn |
|
257 | sortkey = '%s_sort' % sortcolumn | |
258 | rows = sorted(rows, key=lambda x: x[sortkey], |
|
258 | rows = sorted(rows, key=lambda x: x[sortkey], | |
259 | reverse=descending) |
|
259 | reverse=descending) | |
260 |
|
260 | |||
261 | for row, parity in zip(rows, paritygen(stripecount)): |
|
261 | for row, parity in zip(rows, paritygen(stripecount)): | |
262 | row['parity'] = parity |
|
262 | row['parity'] = parity | |
263 | yield row |
|
263 | yield row | |
264 |
|
264 | |||
265 | class hgwebdir(object): |
|
265 | class hgwebdir(object): | |
266 | """HTTP server for multiple repositories. |
|
266 | """HTTP server for multiple repositories. | |
267 |
|
267 | |||
268 | Given a configuration, different repositories will be served depending |
|
268 | Given a configuration, different repositories will be served depending | |
269 | on the request path. |
|
269 | on the request path. | |
270 |
|
270 | |||
271 | Instances are typically used as WSGI applications. |
|
271 | Instances are typically used as WSGI applications. | |
272 | """ |
|
272 | """ | |
273 | def __init__(self, conf, baseui=None): |
|
273 | def __init__(self, conf, baseui=None): | |
274 | self.conf = conf |
|
274 | self.conf = conf | |
275 | self.baseui = baseui |
|
275 | self.baseui = baseui | |
276 | self.ui = None |
|
276 | self.ui = None | |
277 | self.lastrefresh = 0 |
|
277 | self.lastrefresh = 0 | |
278 | self.motd = None |
|
278 | self.motd = None | |
279 | self.refresh() |
|
279 | self.refresh() | |
280 |
|
280 | |||
281 | def refresh(self): |
|
281 | def refresh(self): | |
282 | if self.ui: |
|
282 | if self.ui: | |
283 | refreshinterval = self.ui.configint('web', 'refreshinterval') |
|
283 | refreshinterval = self.ui.configint('web', 'refreshinterval') | |
284 | else: |
|
284 | else: | |
285 | item = configitems.coreitems['web']['refreshinterval'] |
|
285 | item = configitems.coreitems['web']['refreshinterval'] | |
286 | refreshinterval = item.default |
|
286 | refreshinterval = item.default | |
287 |
|
287 | |||
288 | # refreshinterval <= 0 means to always refresh. |
|
288 | # refreshinterval <= 0 means to always refresh. | |
289 | if (refreshinterval > 0 and |
|
289 | if (refreshinterval > 0 and | |
290 | self.lastrefresh + refreshinterval > time.time()): |
|
290 | self.lastrefresh + refreshinterval > time.time()): | |
291 | return |
|
291 | return | |
292 |
|
292 | |||
293 | if self.baseui: |
|
293 | if self.baseui: | |
294 | u = self.baseui.copy() |
|
294 | u = self.baseui.copy() | |
295 | else: |
|
295 | else: | |
296 | u = uimod.ui.load() |
|
296 | u = uimod.ui.load() | |
297 | u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir') |
|
297 | u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir') | |
298 | u.setconfig('ui', 'nontty', 'true', 'hgwebdir') |
|
298 | u.setconfig('ui', 'nontty', 'true', 'hgwebdir') | |
299 | # displaying bundling progress bar while serving feels wrong and may |
|
299 | # displaying bundling progress bar while serving feels wrong and may | |
300 | # break some wsgi implementations. |
|
300 | # break some wsgi implementations. | |
301 | u.setconfig('progress', 'disable', 'true', 'hgweb') |
|
301 | u.setconfig('progress', 'disable', 'true', 'hgweb') | |
302 |
|
302 | |||
303 | if not isinstance(self.conf, (dict, list, tuple)): |
|
303 | if not isinstance(self.conf, (dict, list, tuple)): | |
304 | map = {'paths': 'hgweb-paths'} |
|
304 | map = {'paths': 'hgweb-paths'} | |
305 | if not os.path.exists(self.conf): |
|
305 | if not os.path.exists(self.conf): | |
306 | raise error.Abort(_('config file %s not found!') % self.conf) |
|
306 | raise error.Abort(_('config file %s not found!') % self.conf) | |
307 | u.readconfig(self.conf, remap=map, trust=True) |
|
307 | u.readconfig(self.conf, remap=map, trust=True) | |
308 | paths = [] |
|
308 | paths = [] | |
309 | for name, ignored in u.configitems('hgweb-paths'): |
|
309 | for name, ignored in u.configitems('hgweb-paths'): | |
310 | for path in u.configlist('hgweb-paths', name): |
|
310 | for path in u.configlist('hgweb-paths', name): | |
311 | paths.append((name, path)) |
|
311 | paths.append((name, path)) | |
312 | elif isinstance(self.conf, (list, tuple)): |
|
312 | elif isinstance(self.conf, (list, tuple)): | |
313 | paths = self.conf |
|
313 | paths = self.conf | |
314 | elif isinstance(self.conf, dict): |
|
314 | elif isinstance(self.conf, dict): | |
315 | paths = self.conf.items() |
|
315 | paths = self.conf.items() | |
316 |
|
316 | |||
317 | repos = findrepos(paths) |
|
317 | repos = findrepos(paths) | |
318 | for prefix, root in u.configitems('collections'): |
|
318 | for prefix, root in u.configitems('collections'): | |
319 | prefix = util.pconvert(prefix) |
|
319 | prefix = util.pconvert(prefix) | |
320 | for path in scmutil.walkrepos(root, followsym=True): |
|
320 | for path in scmutil.walkrepos(root, followsym=True): | |
321 | repo = os.path.normpath(path) |
|
321 | repo = os.path.normpath(path) | |
322 | name = util.pconvert(repo) |
|
322 | name = util.pconvert(repo) | |
323 | if name.startswith(prefix): |
|
323 | if name.startswith(prefix): | |
324 | name = name[len(prefix):] |
|
324 | name = name[len(prefix):] | |
325 | repos.append((name.lstrip('/'), repo)) |
|
325 | repos.append((name.lstrip('/'), repo)) | |
326 |
|
326 | |||
327 | self.repos = repos |
|
327 | self.repos = repos | |
328 | self.ui = u |
|
328 | self.ui = u | |
329 | encoding.encoding = self.ui.config('web', 'encoding') |
|
329 | encoding.encoding = self.ui.config('web', 'encoding') | |
330 | self.style = self.ui.config('web', 'style') |
|
330 | self.style = self.ui.config('web', 'style') | |
331 | self.templatepath = self.ui.config('web', 'templates', untrusted=False) |
|
331 | self.templatepath = self.ui.config('web', 'templates', untrusted=False) | |
332 | self.stripecount = self.ui.config('web', 'stripes') |
|
332 | self.stripecount = self.ui.config('web', 'stripes') | |
333 | if self.stripecount: |
|
333 | if self.stripecount: | |
334 | self.stripecount = int(self.stripecount) |
|
334 | self.stripecount = int(self.stripecount) | |
335 | prefix = self.ui.config('web', 'prefix') |
|
335 | prefix = self.ui.config('web', 'prefix') | |
336 | if prefix.startswith('/'): |
|
336 | if prefix.startswith('/'): | |
337 | prefix = prefix[1:] |
|
337 | prefix = prefix[1:] | |
338 | if prefix.endswith('/'): |
|
338 | if prefix.endswith('/'): | |
339 | prefix = prefix[:-1] |
|
339 | prefix = prefix[:-1] | |
340 | self.prefix = prefix |
|
340 | self.prefix = prefix | |
341 | self.lastrefresh = time.time() |
|
341 | self.lastrefresh = time.time() | |
342 |
|
342 | |||
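For orientation, the ``conf`` value handled above can take three shapes, all of which end up as the same (name, path) list; a minimal sketch (names and repository locations invented), where the config-file form is what ``u.readconfig(self.conf, remap=map, trust=True)`` parses with ``[paths]`` remapped to ``hgweb-paths``:

    # Illustrative only: the names and paths below are made up.
    conf_as_dict = {'projects/hello': '/srv/repos/hello'}
    conf_as_list = [('projects/hello', '/srv/repos/hello')]
    # Config-file form (a path to an INI-style file):
    #   [paths]
    #   projects/hello = /srv/repos/hello
    #   [collections]
    #   /srv/repos = /srv/repos
    # [collections] roots are walked with scmutil.walkrepos() and each
    # repository found is named by stripping the prefix, as in the loop above.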
343 | def run(self): |
|
343 | def run(self): | |
344 | if not encoding.environ.get('GATEWAY_INTERFACE', |
|
344 | if not encoding.environ.get('GATEWAY_INTERFACE', | |
345 | '').startswith("CGI/1."): |
|
345 | '').startswith("CGI/1."): | |
346 | raise RuntimeError("This function is only intended to be " |
|
346 | raise RuntimeError("This function is only intended to be " | |
347 | "called while running as a CGI script.") |
|
347 | "called while running as a CGI script.") | |
348 | wsgicgi.launch(self) |
|
348 | wsgicgi.launch(self) | |
349 |
|
349 | |||
350 | def __call__(self, env, respond): |
|
350 | def __call__(self, env, respond): | |
351 | baseurl = self.ui.config('web', 'baseurl') |
|
351 | baseurl = self.ui.config('web', 'baseurl') | |
352 | req = requestmod.parserequestfromenv(env, altbaseurl=baseurl) |
|
352 | req = requestmod.parserequestfromenv(env, altbaseurl=baseurl) | |
353 | res = requestmod.wsgiresponse(req, respond) |
|
353 | res = requestmod.wsgiresponse(req, respond) | |
354 |
|
354 | |||
355 | return self.run_wsgi(req, res) |
|
355 | return self.run_wsgi(req, res) | |
356 |
|
356 | |||
357 | def run_wsgi(self, req, res): |
|
357 | def run_wsgi(self, req, res): | |
358 | profile = self.ui.configbool('profiling', 'enabled') |
|
358 | profile = self.ui.configbool('profiling', 'enabled') | |
359 | with profiling.profile(self.ui, enabled=profile): |
|
359 | with profiling.profile(self.ui, enabled=profile): | |
360 | try: |
|
360 | try: | |
361 | for r in self._runwsgi(req, res): |
|
361 | for r in self._runwsgi(req, res): | |
362 | yield r |
|
362 | yield r | |
363 | finally: |
|
363 | finally: | |
364 | # There are known cycles in localrepository that prevent |
|
364 | # There are known cycles in localrepository that prevent | |
365 | # those objects (and tons of held references) from being |
|
365 | # those objects (and tons of held references) from being | |
366 | # collected through normal refcounting. We mitigate those |
|
366 | # collected through normal refcounting. We mitigate those | |
367 | # leaks by performing an explicit GC on every request. |
|
367 | # leaks by performing an explicit GC on every request. | |
368 | # TODO remove this once leaks are fixed. |
|
368 | # TODO remove this once leaks are fixed. | |
369 | # TODO only run this on requests that create localrepository |
|
369 | # TODO only run this on requests that create localrepository | |
370 | # instances instead of every request. |
|
370 | # instances instead of every request. | |
371 | gc.collect() |
|
371 | gc.collect() | |
372 |
|
372 | |||
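Because ``__call__`` above makes this object a plain WSGI application, one way to exercise it locally is the standard library's wsgiref server; a sketch assuming Mercurial is importable in the interpreter (the repository path and port are invented):

    # Sketch: serve an hgwebdir app with wsgiref (path and port made up).
    from wsgiref.simple_server import make_server
    from mercurial.hgweb import hgwebdir

    app = hgwebdir({'hello': '/srv/repos/hello'})
    make_server('localhost', 8080, app).serve_forever()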
373 | def _runwsgi(self, req, res): |
|
373 | def _runwsgi(self, req, res): | |
374 | try: |
|
374 | try: | |
375 | self.refresh() |
|
375 | self.refresh() | |
376 |
|
376 | |||
377 | csp, nonce = cspvalues(self.ui) |
|
377 | csp, nonce = cspvalues(self.ui) | |
378 | if csp: |
|
378 | if csp: | |
379 | res.headers['Content-Security-Policy'] = csp |
|
379 | res.headers['Content-Security-Policy'] = csp | |
380 |
|
380 | |||
381 | virtual = req.dispatchpath.strip('/') |
|
381 | virtual = req.dispatchpath.strip('/') | |
382 | tmpl = self.templater(req, nonce) |
|
382 | tmpl = self.templater(req, nonce) | |
383 | ctype = tmpl.render('mimetype', {'encoding': encoding.encoding}) |
|
383 | ctype = tmpl.render('mimetype', {'encoding': encoding.encoding}) | |
384 |
|
384 | |||
385 | # Global defaults. These can be overridden by any handler. |
|
385 | # Global defaults. These can be overridden by any handler. | |
386 | res.status = '200 Script output follows' |
|
386 | res.status = '200 Script output follows' | |
387 | res.headers['Content-Type'] = ctype |
|
387 | res.headers['Content-Type'] = ctype | |
388 |
|
388 | |||
389 | # a static file |
|
389 | # a static file | |
390 | if virtual.startswith('static/') or 'static' in req.qsparams: |
|
390 | if virtual.startswith('static/') or 'static' in req.qsparams: | |
391 | if virtual.startswith('static/'): |
|
391 | if virtual.startswith('static/'): | |
392 | fname = virtual[7:] |
|
392 | fname = virtual[7:] | |
393 | else: |
|
393 | else: | |
394 | fname = req.qsparams['static'] |
|
394 | fname = req.qsparams['static'] | |
395 | static = self.ui.config("web", "static", None, |
|
395 | static = self.ui.config("web", "static", None, | |
396 | untrusted=False) |
|
396 | untrusted=False) | |
397 | if not static: |
|
397 | if not static: | |
398 | tp = self.templatepath or templater.templatepaths() |
|
398 | tp = self.templatepath or templater.templatepaths() | |
399 | if isinstance(tp, str): |
|
399 | if isinstance(tp, str): | |
400 | tp = [tp] |
|
400 | tp = [tp] | |
401 | static = [os.path.join(p, 'static') for p in tp] |
|
401 | static = [os.path.join(p, 'static') for p in tp] | |
402 |
|
402 | |||
403 | staticfile(static, fname, res) |
|
403 | staticfile(static, fname, res) | |
404 | return res.sendresponse() |
|
404 | return res.sendresponse() | |
405 |
|
405 | |||
406 | # top-level index |
|
406 | # top-level index | |
407 |
|
407 | |||
408 | repos = dict(self.repos) |
|
408 | repos = dict(self.repos) | |
409 |
|
409 | |||
410 | if (not virtual or virtual == 'index') and virtual not in repos: |
|
410 | if (not virtual or virtual == 'index') and virtual not in repos: | |
411 | return self.makeindex(req, res, tmpl) |
|
411 | return self.makeindex(req, res, tmpl) | |
412 |
|
412 | |||
413 | # nested indexes and hgwebs |
|
413 | # nested indexes and hgwebs | |
414 |
|
414 | |||
415 | if virtual.endswith('/index') and virtual not in repos: |
|
415 | if virtual.endswith('/index') and virtual not in repos: | |
416 | subdir = virtual[:-len('index')] |
|
416 | subdir = virtual[:-len('index')] | |
417 | if any(r.startswith(subdir) for r in repos): |
|
417 | if any(r.startswith(subdir) for r in repos): | |
418 | return self.makeindex(req, res, tmpl, subdir) |
|
418 | return self.makeindex(req, res, tmpl, subdir) | |
419 |
|
419 | |||
420 | def _virtualdirs(): |
|
420 | def _virtualdirs(): | |
421 | # Check the full virtual path, each parent, and the root ('') |
|
421 | # Check the full virtual path, each parent, and the root ('') | |
422 | if virtual != '': |
|
422 | if virtual != '': | |
423 | yield virtual |
|
423 | yield virtual | |
424 |
|
424 | |||
425 | for p in util.finddirs(virtual): |
|
425 | for p in util.finddirs(virtual): | |
426 | yield p |
|
426 | yield p | |
427 |
|
427 | |||
428 | yield '' |
|
428 | yield '' | |
429 |
|
429 | |||
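To make the traversal order of ``_virtualdirs()`` concrete, a self-contained sketch that mimics ``util.finddirs`` rather than importing it:

    def virtualdirs(virtual):
        # Yield the full virtual path, then each parent, then ''.
        if virtual:
            yield virtual
        pos = virtual.rfind('/')
        while pos != -1:
            yield virtual[:pos]
            pos = virtual.rfind('/', 0, pos)
        yield ''

    # list(virtualdirs('team/project/repo'))
    #   -> ['team/project/repo', 'team/project', 'team', '']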
430 | for virtualrepo in _virtualdirs(): |
|
430 | for virtualrepo in _virtualdirs(): | |
431 | real = repos.get(virtualrepo) |
|
431 | real = repos.get(virtualrepo) | |
432 | if real: |
|
432 | if real: | |
433 | # Re-parse the WSGI environment to take into account our |
|
433 | # Re-parse the WSGI environment to take into account our | |
434 | # repository path component. |
|
434 | # repository path component. | |
435 | req = requestmod.parserequestfromenv( |
|
435 | req = requestmod.parserequestfromenv( | |
436 | req.rawenv, reponame=virtualrepo, |
|
436 | req.rawenv, reponame=virtualrepo, | |
437 | altbaseurl=self.ui.config('web', 'baseurl')) |
|
437 | altbaseurl=self.ui.config('web', 'baseurl')) | |
438 | try: |
|
438 | try: | |
439 | # ensure caller gets private copy of ui |
|
439 | # ensure caller gets private copy of ui | |
440 | repo = hg.repository(self.ui.copy(), real) |
|
440 | repo = hg.repository(self.ui.copy(), real) | |
441 | return hgweb_mod.hgweb(repo).run_wsgi(req, res) |
|
441 | return hgweb_mod.hgweb(repo).run_wsgi(req, res) | |
442 | except IOError as inst: |
|
442 | except IOError as inst: | |
443 | msg = encoding.strtolocal(inst.strerror) |
|
443 | msg = encoding.strtolocal(inst.strerror) | |
444 | raise ErrorResponse(HTTP_SERVER_ERROR, msg) |
|
444 | raise ErrorResponse(HTTP_SERVER_ERROR, msg) | |
445 | except error.RepoError as inst: |
|
445 | except error.RepoError as inst: | |
446 | raise ErrorResponse(HTTP_SERVER_ERROR, bytes(inst)) |
|
446 | raise ErrorResponse(HTTP_SERVER_ERROR, bytes(inst)) | |
447 |
|
447 | |||
448 | # browse subdirectories |
|
448 | # browse subdirectories | |
449 | subdir = virtual + '/' |
|
449 | subdir = virtual + '/' | |
450 | if [r for r in repos if r.startswith(subdir)]: |
|
450 | if [r for r in repos if r.startswith(subdir)]: | |
451 | return self.makeindex(req, res, tmpl, subdir) |
|
451 | return self.makeindex(req, res, tmpl, subdir) | |
452 |
|
452 | |||
453 | # prefixes not found |
|
453 | # prefixes not found | |
454 | res.status = '404 Not Found' |
|
454 | res.status = '404 Not Found' | |
455 | res.setbodygen(tmpl('notfound', repo=virtual)) |
|
455 | res.setbodygen(tmpl.generate('notfound', {'repo': virtual})) | |
456 | return res.sendresponse() |
|
456 | return res.sendresponse() | |
457 |
|
457 | |||
458 | except ErrorResponse as e: |
|
458 | except ErrorResponse as e: | |
459 | res.status = statusmessage(e.code, pycompat.bytestr(e)) |
|
459 | res.status = statusmessage(e.code, pycompat.bytestr(e)) | |
460 | res.setbodygen(tmpl('error', error=e.message or '')) |
|
460 | res.setbodygen(tmpl.generate('error', {'error': e.message or ''})) | |
461 | return res.sendresponse() |
|
461 | return res.sendresponse() | |
462 | finally: |
|
462 | finally: | |
463 | tmpl = None |
|
463 | tmpl = None | |
464 |
|
464 | |||
465 | def makeindex(self, req, res, tmpl, subdir=""): |
|
465 | def makeindex(self, req, res, tmpl, subdir=""): | |
466 | self.refresh() |
|
466 | self.refresh() | |
467 | sortable = ["name", "description", "contact", "lastchange"] |
|
467 | sortable = ["name", "description", "contact", "lastchange"] | |
468 | sortcolumn, descending = None, False |
|
468 | sortcolumn, descending = None, False | |
469 | if 'sort' in req.qsparams: |
|
469 | if 'sort' in req.qsparams: | |
470 | sortcolumn = req.qsparams['sort'] |
|
470 | sortcolumn = req.qsparams['sort'] | |
471 | descending = sortcolumn.startswith('-') |
|
471 | descending = sortcolumn.startswith('-') | |
472 | if descending: |
|
472 | if descending: | |
473 | sortcolumn = sortcolumn[1:] |
|
473 | sortcolumn = sortcolumn[1:] | |
474 | if sortcolumn not in sortable: |
|
474 | if sortcolumn not in sortable: | |
475 | sortcolumn = "" |
|
475 | sortcolumn = "" | |
476 |
|
476 | |||
477 | sort = [("sort_%s" % column, |
|
477 | sort = [("sort_%s" % column, | |
478 | "%s%s" % ((not descending and column == sortcolumn) |
|
478 | "%s%s" % ((not descending and column == sortcolumn) | |
479 | and "-" or "", column)) |
|
479 | and "-" or "", column)) | |
480 | for column in sortable] |
|
480 | for column in sortable] | |
481 |
|
481 | |||
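As a worked example of the and/or toggle above, here is what ``sort`` evaluates to for an ascending sort on "name" (values computed by hand):

    sortable = ["name", "description", "contact", "lastchange"]
    sortcolumn, descending = "name", False
    sort = [("sort_%s" % column,
             "%s%s" % ((not descending and column == sortcolumn)
                       and "-" or "", column))
            for column in sortable]
    # sort == [('sort_name', '-name'),
    #          ('sort_description', 'description'),
    #          ('sort_contact', 'contact'),
    #          ('sort_lastchange', 'lastchange')]
    # i.e. only the column currently sorted ascending links to its
    # descending ("-" prefixed) variant.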
482 | self.refresh() |
|
482 | self.refresh() | |
483 |
|
483 | |||
484 | entries = indexentries(self.ui, self.repos, req, |
|
484 | entries = indexentries(self.ui, self.repos, req, | |
485 | self.stripecount, sortcolumn=sortcolumn, |
|
485 | self.stripecount, sortcolumn=sortcolumn, | |
486 | descending=descending, subdir=subdir) |
|
486 | descending=descending, subdir=subdir) | |
487 |
|
487 | |||
488 | res.setbodygen(tmpl( |
|
488 | mapping = { | |
489 | 'index', |
|
489 | 'entries': entries, | |
490 | entries=entries, |
|
490 | 'subdir': subdir, | |
491 | subdir=subdir, |
|
491 | 'pathdef': hgweb_mod.makebreadcrumb('/' + subdir, self.prefix), | |
492 | pathdef=hgweb_mod.makebreadcrumb('/' + subdir, self.prefix), |
|
492 | 'sortcolumn': sortcolumn, | |
493 | sortcolumn=sortcolumn, |
|
493 | 'descending': descending, | |
494 | descending=descending, |
|
494 | } | |
495 | **dict(sort))) |
|
495 | mapping.update(sort) | |
496 |
|
496 | res.setbodygen(tmpl.generate('index', mapping)) | ||
497 | return res.sendresponse() |
|
497 | return res.sendresponse() | |
498 |
|
498 | |||
499 | def templater(self, req, nonce): |
|
499 | def templater(self, req, nonce): | |
500 |
|
500 | |||
501 | def motd(**map): |
|
501 | def motd(**map): | |
502 | if self.motd is not None: |
|
502 | if self.motd is not None: | |
503 | yield self.motd |
|
503 | yield self.motd | |
504 | else: |
|
504 | else: | |
505 | yield config('web', 'motd') |
|
505 | yield config('web', 'motd') | |
506 |
|
506 | |||
507 | def config(section, name, default=uimod._unset, untrusted=True): |
|
507 | def config(section, name, default=uimod._unset, untrusted=True): | |
508 | return self.ui.config(section, name, default, untrusted) |
|
508 | return self.ui.config(section, name, default, untrusted) | |
509 |
|
509 | |||
510 | vars = {} |
|
510 | vars = {} | |
511 | styles, (style, mapfile) = hgweb_mod.getstyle(req, config, |
|
511 | styles, (style, mapfile) = hgweb_mod.getstyle(req, config, | |
512 | self.templatepath) |
|
512 | self.templatepath) | |
513 | if style == styles[0]: |
|
513 | if style == styles[0]: | |
514 | vars['style'] = style |
|
514 | vars['style'] = style | |
515 |
|
515 | |||
516 | sessionvars = webutil.sessionvars(vars, r'?') |
|
516 | sessionvars = webutil.sessionvars(vars, r'?') | |
517 | logourl = config('web', 'logourl') |
|
517 | logourl = config('web', 'logourl') | |
518 | logoimg = config('web', 'logoimg') |
|
518 | logoimg = config('web', 'logoimg') | |
519 | staticurl = (config('web', 'staticurl') |
|
519 | staticurl = (config('web', 'staticurl') | |
520 | or req.apppath + '/static/') |
|
520 | or req.apppath + '/static/') | |
521 | if not staticurl.endswith('/'): |
|
521 | if not staticurl.endswith('/'): | |
522 | staticurl += '/' |
|
522 | staticurl += '/' | |
523 |
|
523 | |||
524 | defaults = { |
|
524 | defaults = { | |
525 | "encoding": encoding.encoding, |
|
525 | "encoding": encoding.encoding, | |
526 | "motd": motd, |
|
526 | "motd": motd, | |
527 | "url": req.apppath + '/', |
|
527 | "url": req.apppath + '/', | |
528 | "logourl": logourl, |
|
528 | "logourl": logourl, | |
529 | "logoimg": logoimg, |
|
529 | "logoimg": logoimg, | |
530 | "staticurl": staticurl, |
|
530 | "staticurl": staticurl, | |
531 | "sessionvars": sessionvars, |
|
531 | "sessionvars": sessionvars, | |
532 | "style": style, |
|
532 | "style": style, | |
533 | "nonce": nonce, |
|
533 | "nonce": nonce, | |
534 | } |
|
534 | } | |
535 | tmpl = templater.templater.frommapfile(mapfile, defaults=defaults) |
|
535 | tmpl = templater.templater.frommapfile(mapfile, defaults=defaults) | |
536 | return tmpl |
|
536 | return tmpl |
@@ -1,1486 +1,1485 b'' | |||||
1 | # |
|
1 | # | |
2 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
2 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> | |
3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import copy |
|
10 | import copy | |
11 | import mimetypes |
|
11 | import mimetypes | |
12 | import os |
|
12 | import os | |
13 | import re |
|
13 | import re | |
14 |
|
14 | |||
15 | from ..i18n import _ |
|
15 | from ..i18n import _ | |
16 | from ..node import hex, nullid, short |
|
16 | from ..node import hex, nullid, short | |
17 |
|
17 | |||
18 | from .common import ( |
|
18 | from .common import ( | |
19 | ErrorResponse, |
|
19 | ErrorResponse, | |
20 | HTTP_FORBIDDEN, |
|
20 | HTTP_FORBIDDEN, | |
21 | HTTP_NOT_FOUND, |
|
21 | HTTP_NOT_FOUND, | |
22 | get_contact, |
|
22 | get_contact, | |
23 | paritygen, |
|
23 | paritygen, | |
24 | staticfile, |
|
24 | staticfile, | |
25 | ) |
|
25 | ) | |
26 |
|
26 | |||
27 | from .. import ( |
|
27 | from .. import ( | |
28 | archival, |
|
28 | archival, | |
29 | dagop, |
|
29 | dagop, | |
30 | encoding, |
|
30 | encoding, | |
31 | error, |
|
31 | error, | |
32 | graphmod, |
|
32 | graphmod, | |
33 | pycompat, |
|
33 | pycompat, | |
34 | revset, |
|
34 | revset, | |
35 | revsetlang, |
|
35 | revsetlang, | |
36 | scmutil, |
|
36 | scmutil, | |
37 | smartset, |
|
37 | smartset, | |
38 | templater, |
|
38 | templater, | |
39 | util, |
|
39 | util, | |
40 | ) |
|
40 | ) | |
41 |
|
41 | |||
42 | from . import ( |
|
42 | from . import ( | |
43 | webutil, |
|
43 | webutil, | |
44 | ) |
|
44 | ) | |
45 |
|
45 | |||
46 | __all__ = [] |
|
46 | __all__ = [] | |
47 | commands = {} |
|
47 | commands = {} | |
48 |
|
48 | |||
49 | class webcommand(object): |
|
49 | class webcommand(object): | |
50 | """Decorator used to register a web command handler. |
|
50 | """Decorator used to register a web command handler. | |
51 |
|
51 | |||
52 | The decorator takes as its positional arguments the name/path the |
|
52 | The decorator takes as its positional arguments the name/path the | |
53 | command should be accessible under. |
|
53 | command should be accessible under. | |
54 |
|
54 | |||
55 | When called, functions receive as arguments a ``requestcontext``, |
|
55 | When called, functions receive as arguments a ``requestcontext``, | |
56 | ``wsgirequest``, and a templater instance for generating output. |
|
56 | ``wsgirequest``, and a templater instance for generating output. | |
57 | The functions should populate the ``rctx.res`` object with details |
|
57 | The functions should populate the ``rctx.res`` object with details | |
58 | about the HTTP response. |
|
58 | about the HTTP response. | |
59 |
|
59 | |||
60 | The function returns a generator to be consumed by the WSGI application. |
|
60 | The function returns a generator to be consumed by the WSGI application. | |
61 | For most commands, this should be the result from |
|
61 | For most commands, this should be the result from | |
62 | ``web.res.sendresponse()``. Many commands will call ``web.sendtemplate()`` |
|
62 | ``web.res.sendresponse()``. Many commands will call ``web.sendtemplate()`` | |
63 | to render a template. |
|
63 | to render a template. | |
64 |
|
64 | |||
65 | Usage: |
|
65 | Usage: | |
66 |
|
66 | |||
67 | @webcommand('mycommand') |
|
67 | @webcommand('mycommand') | |
68 | def mycommand(web): |
|
68 | def mycommand(web): | |
69 | pass |
|
69 | pass | |
70 | """ |
|
70 | """ | |
71 |
|
71 | |||
72 | def __init__(self, name): |
|
72 | def __init__(self, name): | |
73 | self.name = name |
|
73 | self.name = name | |
74 |
|
74 | |||
75 | def __call__(self, func): |
|
75 | def __call__(self, func): | |
76 | __all__.append(self.name) |
|
76 | __all__.append(self.name) | |
77 | commands[self.name] = func |
|
77 | commands[self.name] = func | |
78 | return func |
|
78 | return func | |
79 |
|
79 | |||
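Expanding the docstring's skeleton slightly, a hypothetical handler (the template name and keyword are invented) showing the usual pattern of returning ``web.sendtemplate(...)``:

    @webcommand('hello')
    def hello(web):
        # Hypothetical: renders a made-up 'hello' template and returns
        # the generator that the WSGI layer will consume.
        return web.sendtemplate(
            'hello',
            greeting='hello from %s' % web.repo.root)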
80 | @webcommand('log') |
|
80 | @webcommand('log') | |
81 | def log(web): |
|
81 | def log(web): | |
82 | """ |
|
82 | """ | |
83 | /log[/{revision}[/{path}]] |
|
83 | /log[/{revision}[/{path}]] | |
84 | -------------------------- |
|
84 | -------------------------- | |
85 |
|
85 | |||
86 | Show repository or file history. |
|
86 | Show repository or file history. | |
87 |
|
87 | |||
88 | For URLs of the form ``/log/{revision}``, a list of changesets starting at |
|
88 | For URLs of the form ``/log/{revision}``, a list of changesets starting at | |
89 | the specified changeset identifier is shown. If ``{revision}`` is not |
|
89 | the specified changeset identifier is shown. If ``{revision}`` is not | |
90 | defined, the default is ``tip``. This form is equivalent to the |
|
90 | defined, the default is ``tip``. This form is equivalent to the | |
91 | ``changelog`` handler. |
|
91 | ``changelog`` handler. | |
92 |
|
92 | |||
93 | For URLs of the form ``/log/{revision}/{file}``, the history for a specific |
|
93 | For URLs of the form ``/log/{revision}/{file}``, the history for a specific | |
94 | file will be shown. This form is equivalent to the ``filelog`` handler. |
|
94 | file will be shown. This form is equivalent to the ``filelog`` handler. | |
95 | """ |
|
95 | """ | |
96 |
|
96 | |||
97 | if web.req.qsparams.get('file'): |
|
97 | if web.req.qsparams.get('file'): | |
98 | return filelog(web) |
|
98 | return filelog(web) | |
99 | else: |
|
99 | else: | |
100 | return changelog(web) |
|
100 | return changelog(web) | |
101 |
|
101 | |||
102 | @webcommand('rawfile') |
|
102 | @webcommand('rawfile') | |
103 | def rawfile(web): |
|
103 | def rawfile(web): | |
104 | guessmime = web.configbool('web', 'guessmime') |
|
104 | guessmime = web.configbool('web', 'guessmime') | |
105 |
|
105 | |||
106 | path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', '')) |
|
106 | path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', '')) | |
107 | if not path: |
|
107 | if not path: | |
108 | return manifest(web) |
|
108 | return manifest(web) | |
109 |
|
109 | |||
110 | try: |
|
110 | try: | |
111 | fctx = webutil.filectx(web.repo, web.req) |
|
111 | fctx = webutil.filectx(web.repo, web.req) | |
112 | except error.LookupError as inst: |
|
112 | except error.LookupError as inst: | |
113 | try: |
|
113 | try: | |
114 | return manifest(web) |
|
114 | return manifest(web) | |
115 | except ErrorResponse: |
|
115 | except ErrorResponse: | |
116 | raise inst |
|
116 | raise inst | |
117 |
|
117 | |||
118 | path = fctx.path() |
|
118 | path = fctx.path() | |
119 | text = fctx.data() |
|
119 | text = fctx.data() | |
120 | mt = 'application/binary' |
|
120 | mt = 'application/binary' | |
121 | if guessmime: |
|
121 | if guessmime: | |
122 | mt = mimetypes.guess_type(path)[0] |
|
122 | mt = mimetypes.guess_type(path)[0] | |
123 | if mt is None: |
|
123 | if mt is None: | |
124 | if util.binary(text): |
|
124 | if util.binary(text): | |
125 | mt = 'application/binary' |
|
125 | mt = 'application/binary' | |
126 | else: |
|
126 | else: | |
127 | mt = 'text/plain' |
|
127 | mt = 'text/plain' | |
128 | if mt.startswith('text/'): |
|
128 | if mt.startswith('text/'): | |
129 | mt += '; charset="%s"' % encoding.encoding |
|
129 | mt += '; charset="%s"' % encoding.encoding | |
130 |
|
130 | |||
131 | web.res.headers['Content-Type'] = mt |
|
131 | web.res.headers['Content-Type'] = mt | |
132 | filename = (path.rpartition('/')[-1] |
|
132 | filename = (path.rpartition('/')[-1] | |
133 | .replace('\\', '\\\\').replace('"', '\\"')) |
|
133 | .replace('\\', '\\\\').replace('"', '\\"')) | |
134 | web.res.headers['Content-Disposition'] = 'inline; filename="%s"' % filename |
|
134 | web.res.headers['Content-Disposition'] = 'inline; filename="%s"' % filename | |
135 | web.res.setbodybytes(text) |
|
135 | web.res.setbodybytes(text) | |
136 | return web.res.sendresponse() |
|
136 | return web.res.sendresponse() | |
137 |
|
137 | |||
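A self-contained sketch of the content-type decision in ``rawfile`` above; ``is_binary`` stands in for ``util.binary``, which roughly tests for NUL bytes:

    import mimetypes

    def rawfile_mimetype(path, data, guessmime=True, charset='ascii'):
        # Mirror of the logic above: guess from the filename, fall back
        # to sniffing the data, and tag text types with the charset.
        def is_binary(d):                     # stand-in for util.binary
            return b'\0' in d
        mt = 'application/binary'
        if guessmime:
            mt = mimetypes.guess_type(path)[0]
            if mt is None:
                mt = 'application/binary' if is_binary(data) else 'text/plain'
        if mt.startswith('text/'):
            mt += '; charset="%s"' % charset
        return mt

    # rawfile_mimetype('README.txt', b'hi') -> 'text/plain; charset="ascii"'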
138 | def _filerevision(web, fctx): |
|
138 | def _filerevision(web, fctx): | |
139 | f = fctx.path() |
|
139 | f = fctx.path() | |
140 | text = fctx.data() |
|
140 | text = fctx.data() | |
141 | parity = paritygen(web.stripecount) |
|
141 | parity = paritygen(web.stripecount) | |
142 | ishead = fctx.filerev() in fctx.filelog().headrevs() |
|
142 | ishead = fctx.filerev() in fctx.filelog().headrevs() | |
143 |
|
143 | |||
144 | if util.binary(text): |
|
144 | if util.binary(text): | |
145 | mt = mimetypes.guess_type(f)[0] or 'application/octet-stream' |
|
145 | mt = mimetypes.guess_type(f)[0] or 'application/octet-stream' | |
146 | text = '(binary:%s)' % mt |
|
146 | text = '(binary:%s)' % mt | |
147 |
|
147 | |||
148 | def lines(): |
|
148 | def lines(): | |
149 | for lineno, t in enumerate(text.splitlines(True)): |
|
149 | for lineno, t in enumerate(text.splitlines(True)): | |
150 | yield {"line": t, |
|
150 | yield {"line": t, | |
151 | "lineid": "l%d" % (lineno + 1), |
|
151 | "lineid": "l%d" % (lineno + 1), | |
152 | "linenumber": "% 6d" % (lineno + 1), |
|
152 | "linenumber": "% 6d" % (lineno + 1), | |
153 | "parity": next(parity)} |
|
153 | "parity": next(parity)} | |
154 |
|
154 | |||
155 | return web.sendtemplate( |
|
155 | return web.sendtemplate( | |
156 | 'filerevision', |
|
156 | 'filerevision', | |
157 | file=f, |
|
157 | file=f, | |
158 | path=webutil.up(f), |
|
158 | path=webutil.up(f), | |
159 | text=lines(), |
|
159 | text=lines(), | |
160 | symrev=webutil.symrevorshortnode(web.req, fctx), |
|
160 | symrev=webutil.symrevorshortnode(web.req, fctx), | |
161 | rename=webutil.renamelink(fctx), |
|
161 | rename=webutil.renamelink(fctx), | |
162 | permissions=fctx.manifest().flags(f), |
|
162 | permissions=fctx.manifest().flags(f), | |
163 | ishead=int(ishead), |
|
163 | ishead=int(ishead), | |
164 | **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))) |
|
164 | **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))) | |
165 |
|
165 | |||
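For context on the per-line mapping built by ``lines()`` above, a self-contained version with a simple stand-in for ``paritygen`` (assumed here to alternate 0/1 in stripes of ``stripecount``):

    def simpleparity(stripecount):
        # Rough stand-in for common.paritygen: 0,0,...,1,1,... in stripes.
        parity, count = 0, 0
        while True:
            yield parity
            count += 1
            if stripecount and count % stripecount == 0:
                parity = 1 - parity

    def lines(text, stripecount=1):
        parity = simpleparity(stripecount)
        for lineno, t in enumerate(text.splitlines(True)):
            yield {"line": t,
                   "lineid": "l%d" % (lineno + 1),
                   "linenumber": "% 6d" % (lineno + 1),
                   "parity": next(parity)}

    # list(lines("a\nb\n"))[1]
    #   -> {'line': 'b\n', 'lineid': 'l2', 'linenumber': '     2', 'parity': 1}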
166 | @webcommand('file') |
|
166 | @webcommand('file') | |
167 | def file(web): |
|
167 | def file(web): | |
168 | """ |
|
168 | """ | |
169 | /file/{revision}[/{path}] |
|
169 | /file/{revision}[/{path}] | |
170 | ------------------------- |
|
170 | ------------------------- | |
171 |
|
171 | |||
172 | Show information about a directory or file in the repository. |
|
172 | Show information about a directory or file in the repository. | |
173 |
|
173 | |||
174 | Info about the ``path`` given as a URL parameter will be rendered. |
|
174 | Info about the ``path`` given as a URL parameter will be rendered. | |
175 |
|
175 | |||
176 | If ``path`` is a directory, information about the entries in that |
|
176 | If ``path`` is a directory, information about the entries in that | |
177 | directory will be rendered. This form is equivalent to the ``manifest`` |
|
177 | directory will be rendered. This form is equivalent to the ``manifest`` | |
178 | handler. |
|
178 | handler. | |
179 |
|
179 | |||
180 | If ``path`` is a file, information about that file will be shown via |
|
180 | If ``path`` is a file, information about that file will be shown via | |
181 | the ``filerevision`` template. |
|
181 | the ``filerevision`` template. | |
182 |
|
182 | |||
183 | If ``path`` is not defined, information about the root directory will |
|
183 | If ``path`` is not defined, information about the root directory will | |
184 | be rendered. |
|
184 | be rendered. | |
185 | """ |
|
185 | """ | |
186 | if web.req.qsparams.get('style') == 'raw': |
|
186 | if web.req.qsparams.get('style') == 'raw': | |
187 | return rawfile(web) |
|
187 | return rawfile(web) | |
188 |
|
188 | |||
189 | path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', '')) |
|
189 | path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', '')) | |
190 | if not path: |
|
190 | if not path: | |
191 | return manifest(web) |
|
191 | return manifest(web) | |
192 | try: |
|
192 | try: | |
193 | return _filerevision(web, webutil.filectx(web.repo, web.req)) |
|
193 | return _filerevision(web, webutil.filectx(web.repo, web.req)) | |
194 | except error.LookupError as inst: |
|
194 | except error.LookupError as inst: | |
195 | try: |
|
195 | try: | |
196 | return manifest(web) |
|
196 | return manifest(web) | |
197 | except ErrorResponse: |
|
197 | except ErrorResponse: | |
198 | raise inst |
|
198 | raise inst | |
199 |
|
199 | |||
200 | def _search(web): |
|
200 | def _search(web): | |
201 | MODE_REVISION = 'rev' |
|
201 | MODE_REVISION = 'rev' | |
202 | MODE_KEYWORD = 'keyword' |
|
202 | MODE_KEYWORD = 'keyword' | |
203 | MODE_REVSET = 'revset' |
|
203 | MODE_REVSET = 'revset' | |
204 |
|
204 | |||
205 | def revsearch(ctx): |
|
205 | def revsearch(ctx): | |
206 | yield ctx |
|
206 | yield ctx | |
207 |
|
207 | |||
208 | def keywordsearch(query): |
|
208 | def keywordsearch(query): | |
209 | lower = encoding.lower |
|
209 | lower = encoding.lower | |
210 | qw = lower(query).split() |
|
210 | qw = lower(query).split() | |
211 |
|
211 | |||
212 | def revgen(): |
|
212 | def revgen(): | |
213 | cl = web.repo.changelog |
|
213 | cl = web.repo.changelog | |
214 | for i in xrange(len(web.repo) - 1, 0, -100): |
|
214 | for i in xrange(len(web.repo) - 1, 0, -100): | |
215 | l = [] |
|
215 | l = [] | |
216 | for j in cl.revs(max(0, i - 99), i): |
|
216 | for j in cl.revs(max(0, i - 99), i): | |
217 | ctx = web.repo[j] |
|
217 | ctx = web.repo[j] | |
218 | l.append(ctx) |
|
218 | l.append(ctx) | |
219 | l.reverse() |
|
219 | l.reverse() | |
220 | for e in l: |
|
220 | for e in l: | |
221 | yield e |
|
221 | yield e | |
222 |
|
222 | |||
223 | for ctx in revgen(): |
|
223 | for ctx in revgen(): | |
224 | miss = 0 |
|
224 | miss = 0 | |
225 | for q in qw: |
|
225 | for q in qw: | |
226 | if not (q in lower(ctx.user()) or |
|
226 | if not (q in lower(ctx.user()) or | |
227 | q in lower(ctx.description()) or |
|
227 | q in lower(ctx.description()) or | |
228 | q in lower(" ".join(ctx.files()))): |
|
228 | q in lower(" ".join(ctx.files()))): | |
229 | miss = 1 |
|
229 | miss = 1 | |
230 | break |
|
230 | break | |
231 | if miss: |
|
231 | if miss: | |
232 | continue |
|
232 | continue | |
233 |
|
233 | |||
234 | yield ctx |
|
234 | yield ctx | |
235 |
|
235 | |||
236 | def revsetsearch(revs): |
|
236 | def revsetsearch(revs): | |
237 | for r in revs: |
|
237 | for r in revs: | |
238 | yield web.repo[r] |
|
238 | yield web.repo[r] | |
239 |
|
239 | |||
240 | searchfuncs = { |
|
240 | searchfuncs = { | |
241 | MODE_REVISION: (revsearch, 'exact revision search'), |
|
241 | MODE_REVISION: (revsearch, 'exact revision search'), | |
242 | MODE_KEYWORD: (keywordsearch, 'literal keyword search'), |
|
242 | MODE_KEYWORD: (keywordsearch, 'literal keyword search'), | |
243 | MODE_REVSET: (revsetsearch, 'revset expression search'), |
|
243 | MODE_REVSET: (revsetsearch, 'revset expression search'), | |
244 | } |
|
244 | } | |
245 |
|
245 | |||
246 | def getsearchmode(query): |
|
246 | def getsearchmode(query): | |
247 | try: |
|
247 | try: | |
248 | ctx = web.repo[query] |
|
248 | ctx = web.repo[query] | |
249 | except (error.RepoError, error.LookupError): |
|
249 | except (error.RepoError, error.LookupError): | |
250 | # query is not an exact revision pointer, need to |
|
250 | # query is not an exact revision pointer, need to | |
251 | # decide if it's a revset expression or keywords |
|
251 | # decide if it's a revset expression or keywords | |
252 | pass |
|
252 | pass | |
253 | else: |
|
253 | else: | |
254 | return MODE_REVISION, ctx |
|
254 | return MODE_REVISION, ctx | |
255 |
|
255 | |||
256 | revdef = 'reverse(%s)' % query |
|
256 | revdef = 'reverse(%s)' % query | |
257 | try: |
|
257 | try: | |
258 | tree = revsetlang.parse(revdef) |
|
258 | tree = revsetlang.parse(revdef) | |
259 | except error.ParseError: |
|
259 | except error.ParseError: | |
260 | # can't parse to a revset tree |
|
260 | # can't parse to a revset tree | |
261 | return MODE_KEYWORD, query |
|
261 | return MODE_KEYWORD, query | |
262 |
|
262 | |||
263 | if revsetlang.depth(tree) <= 2: |
|
263 | if revsetlang.depth(tree) <= 2: | |
264 | # no revset syntax used |
|
264 | # no revset syntax used | |
265 | return MODE_KEYWORD, query |
|
265 | return MODE_KEYWORD, query | |
266 |
|
266 | |||
267 | if any((token, (value or '')[:3]) == ('string', 're:') |
|
267 | if any((token, (value or '')[:3]) == ('string', 're:') | |
268 | for token, value, pos in revsetlang.tokenize(revdef)): |
|
268 | for token, value, pos in revsetlang.tokenize(revdef)): | |
269 | return MODE_KEYWORD, query |
|
269 | return MODE_KEYWORD, query | |
270 |
|
270 | |||
271 | funcsused = revsetlang.funcsused(tree) |
|
271 | funcsused = revsetlang.funcsused(tree) | |
272 | if not funcsused.issubset(revset.safesymbols): |
|
272 | if not funcsused.issubset(revset.safesymbols): | |
273 | return MODE_KEYWORD, query |
|
273 | return MODE_KEYWORD, query | |
274 |
|
274 | |||
275 | mfunc = revset.match(web.repo.ui, revdef, repo=web.repo) |
|
275 | mfunc = revset.match(web.repo.ui, revdef, repo=web.repo) | |
276 | try: |
|
276 | try: | |
277 | revs = mfunc(web.repo) |
|
277 | revs = mfunc(web.repo) | |
278 | return MODE_REVSET, revs |
|
278 | return MODE_REVSET, revs | |
279 | # ParseError: wrongly placed tokens, wrong arguments, etc |
|
279 | # ParseError: wrongly placed tokens, wrong arguments, etc | |
280 | # RepoLookupError: no such revision, e.g. in 'revision:' |
|
280 | # RepoLookupError: no such revision, e.g. in 'revision:' | |
281 | # Abort: bookmark/tag does not exist |
|
281 | # Abort: bookmark/tag does not exist | |
282 | # LookupError: ambiguous identifier, e.g. in '(bc)' on a large repo |
|
282 | # LookupError: ambiguous identifier, e.g. in '(bc)' on a large repo | |
283 | except (error.ParseError, error.RepoLookupError, error.Abort, |
|
283 | except (error.ParseError, error.RepoLookupError, error.Abort, | |
284 | LookupError): |
|
284 | LookupError): | |
285 | return MODE_KEYWORD, query |
|
285 | return MODE_KEYWORD, query | |
286 |
|
286 | |||
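Condensing the cascade in ``getsearchmode`` above into a sketch (the two predicates stand in for the repository lookup and the revset safety checks, so this is illustrative rather than drop-in):

    def choose_search_mode(query, is_exact_rev, is_safe_revset):
        # 1. An exact changeset identifier wins outright.
        if is_exact_rev(query):
            return 'rev'
        # 2. Otherwise try it as a revset: it must parse, use real revset
        #    syntax, avoid 're:' string patterns, and call only
        #    whitelisted (safe) functions.
        if is_safe_revset('reverse(%s)' % query):
            return 'revset'
        # 3. Anything else becomes a literal keyword search.
        return 'keyword'

    # choose_search_mode('bug 123', lambda q: False, lambda r: False)
    #   -> 'keyword'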
287 | def changelist(**map): |
|
287 | def changelist(**map): | |
288 | count = 0 |
|
288 | count = 0 | |
289 |
|
289 | |||
290 | for ctx in searchfunc[0](funcarg): |
|
290 | for ctx in searchfunc[0](funcarg): | |
291 | count += 1 |
|
291 | count += 1 | |
292 | n = ctx.node() |
|
292 | n = ctx.node() | |
293 | showtags = webutil.showtag(web.repo, web.tmpl, 'changelogtag', n) |
|
293 | showtags = webutil.showtag(web.repo, web.tmpl, 'changelogtag', n) | |
294 | files = webutil.listfilediffs(web.tmpl, ctx.files(), n, |
|
294 | files = webutil.listfilediffs(web.tmpl, ctx.files(), n, | |
295 | web.maxfiles) |
|
295 | web.maxfiles) | |
296 |
|
296 | |||
297 | yield web.tmpl( |
|
297 | lm = webutil.commonentry(web.repo, ctx) | |
298 | 'searchentry', |
|
298 | lm.update({ | |
299 | parity=next(parity), |
|
299 | 'parity': next(parity), | |
300 | changelogtag=showtags, |
|
300 | 'changelogtag': showtags, | |
301 | files=files, |
|
301 | 'files': files, | |
302 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) |
|
302 | }) | |
|
303 | yield web.tmpl.generate('searchentry', lm) | |||
303 |
|
304 | |||
304 | if count >= revcount: |
|
305 | if count >= revcount: | |
305 | break |
|
306 | break | |
306 |
|
307 | |||
307 | query = web.req.qsparams['rev'] |
|
308 | query = web.req.qsparams['rev'] | |
308 | revcount = web.maxchanges |
|
309 | revcount = web.maxchanges | |
309 | if 'revcount' in web.req.qsparams: |
|
310 | if 'revcount' in web.req.qsparams: | |
310 | try: |
|
311 | try: | |
311 | revcount = int(web.req.qsparams.get('revcount', revcount)) |
|
312 | revcount = int(web.req.qsparams.get('revcount', revcount)) | |
312 | revcount = max(revcount, 1) |
|
313 | revcount = max(revcount, 1) | |
313 | web.tmpl.defaults['sessionvars']['revcount'] = revcount |
|
314 | web.tmpl.defaults['sessionvars']['revcount'] = revcount | |
314 | except ValueError: |
|
315 | except ValueError: | |
315 | pass |
|
316 | pass | |
316 |
|
317 | |||
317 | lessvars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
318 | lessvars = copy.copy(web.tmpl.defaults['sessionvars']) | |
318 | lessvars['revcount'] = max(revcount // 2, 1) |
|
319 | lessvars['revcount'] = max(revcount // 2, 1) | |
319 | lessvars['rev'] = query |
|
320 | lessvars['rev'] = query | |
320 | morevars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
321 | morevars = copy.copy(web.tmpl.defaults['sessionvars']) | |
321 | morevars['revcount'] = revcount * 2 |
|
322 | morevars['revcount'] = revcount * 2 | |
322 | morevars['rev'] = query |
|
323 | morevars['rev'] = query | |
323 |
|
324 | |||
324 | mode, funcarg = getsearchmode(query) |
|
325 | mode, funcarg = getsearchmode(query) | |
325 |
|
326 | |||
326 | if 'forcekw' in web.req.qsparams: |
|
327 | if 'forcekw' in web.req.qsparams: | |
327 | showforcekw = '' |
|
328 | showforcekw = '' | |
328 | showunforcekw = searchfuncs[mode][1] |
|
329 | showunforcekw = searchfuncs[mode][1] | |
329 | mode = MODE_KEYWORD |
|
330 | mode = MODE_KEYWORD | |
330 | funcarg = query |
|
331 | funcarg = query | |
331 | else: |
|
332 | else: | |
332 | if mode != MODE_KEYWORD: |
|
333 | if mode != MODE_KEYWORD: | |
333 | showforcekw = searchfuncs[MODE_KEYWORD][1] |
|
334 | showforcekw = searchfuncs[MODE_KEYWORD][1] | |
334 | else: |
|
335 | else: | |
335 | showforcekw = '' |
|
336 | showforcekw = '' | |
336 | showunforcekw = '' |
|
337 | showunforcekw = '' | |
337 |
|
338 | |||
338 | searchfunc = searchfuncs[mode] |
|
339 | searchfunc = searchfuncs[mode] | |
339 |
|
340 | |||
340 | tip = web.repo['tip'] |
|
341 | tip = web.repo['tip'] | |
341 | parity = paritygen(web.stripecount) |
|
342 | parity = paritygen(web.stripecount) | |
342 |
|
343 | |||
343 | return web.sendtemplate( |
|
344 | return web.sendtemplate( | |
344 | 'search', |
|
345 | 'search', | |
345 | query=query, |
|
346 | query=query, | |
346 | node=tip.hex(), |
|
347 | node=tip.hex(), | |
347 | symrev='tip', |
|
348 | symrev='tip', | |
348 | entries=changelist, |
|
349 | entries=changelist, | |
349 | archives=web.archivelist('tip'), |
|
350 | archives=web.archivelist('tip'), | |
350 | morevars=morevars, |
|
351 | morevars=morevars, | |
351 | lessvars=lessvars, |
|
352 | lessvars=lessvars, | |
352 | modedesc=searchfunc[1], |
|
353 | modedesc=searchfunc[1], | |
353 | showforcekw=showforcekw, |
|
354 | showforcekw=showforcekw, | |
354 | showunforcekw=showunforcekw) |
|
355 | showunforcekw=showunforcekw) | |
355 |
|
356 | |||
356 | @webcommand('changelog') |
|
357 | @webcommand('changelog') | |
357 | def changelog(web, shortlog=False): |
|
358 | def changelog(web, shortlog=False): | |
358 | """ |
|
359 | """ | |
359 | /changelog[/{revision}] |
|
360 | /changelog[/{revision}] | |
360 | ----------------------- |
|
361 | ----------------------- | |
361 |
|
362 | |||
362 | Show information about multiple changesets. |
|
363 | Show information about multiple changesets. | |
363 |
|
364 | |||
364 | If the optional ``revision`` URL argument is absent, information about |
|
365 | If the optional ``revision`` URL argument is absent, information about | |
365 | all changesets starting at ``tip`` will be rendered. If the ``revision`` |
|
366 | all changesets starting at ``tip`` will be rendered. If the ``revision`` | |
366 | argument is present, changesets will be shown starting from the specified |
|
367 | argument is present, changesets will be shown starting from the specified | |
367 | revision. |
|
368 | revision. | |
368 |
|
369 | |||
369 | If ``revision`` is absent, the ``rev`` query string argument may be |
|
370 | If ``revision`` is absent, the ``rev`` query string argument may be | |
370 | defined. This will perform a search for changesets. |
|
371 | defined. This will perform a search for changesets. | |
371 |
|
372 | |||
372 | The argument for ``rev`` can be a single revision, a revision set, |
|
373 | The argument for ``rev`` can be a single revision, a revision set, | |
373 | or a literal keyword to search for in changeset data (equivalent to |
|
374 | or a literal keyword to search for in changeset data (equivalent to | |
374 | :hg:`log -k`). |
|
375 | :hg:`log -k`). | |
375 |
|
376 | |||
376 | The ``revcount`` query string argument defines the maximum number of |
|
377 | The ``revcount`` query string argument defines the maximum number of | |
377 | changesets to render. |
|
378 | changesets to render. | |
378 |
|
379 | |||
379 | For non-searches, the ``changelog`` template will be rendered. |
|
380 | For non-searches, the ``changelog`` template will be rendered. | |
380 | """ |
|
381 | """ | |
381 |
|
382 | |||
382 | query = '' |
|
383 | query = '' | |
383 | if 'node' in web.req.qsparams: |
|
384 | if 'node' in web.req.qsparams: | |
384 | ctx = webutil.changectx(web.repo, web.req) |
|
385 | ctx = webutil.changectx(web.repo, web.req) | |
385 | symrev = webutil.symrevorshortnode(web.req, ctx) |
|
386 | symrev = webutil.symrevorshortnode(web.req, ctx) | |
386 | elif 'rev' in web.req.qsparams: |
|
387 | elif 'rev' in web.req.qsparams: | |
387 | return _search(web) |
|
388 | return _search(web) | |
388 | else: |
|
389 | else: | |
389 | ctx = web.repo['tip'] |
|
390 | ctx = web.repo['tip'] | |
390 | symrev = 'tip' |
|
391 | symrev = 'tip' | |
391 |
|
392 | |||
392 | def changelist(): |
|
393 | def changelist(): | |
393 | revs = [] |
|
394 | revs = [] | |
394 | if pos != -1: |
|
395 | if pos != -1: | |
395 | revs = web.repo.changelog.revs(pos, 0) |
|
396 | revs = web.repo.changelog.revs(pos, 0) | |
396 | curcount = 0 |
|
397 | curcount = 0 | |
397 | for rev in revs: |
|
398 | for rev in revs: | |
398 | curcount += 1 |
|
399 | curcount += 1 | |
399 | if curcount > revcount + 1: |
|
400 | if curcount > revcount + 1: | |
400 | break |
|
401 | break | |
401 |
|
402 | |||
402 | entry = webutil.changelistentry(web, web.repo[rev]) |
|
403 | entry = webutil.changelistentry(web, web.repo[rev]) | |
403 | entry['parity'] = next(parity) |
|
404 | entry['parity'] = next(parity) | |
404 | yield entry |
|
405 | yield entry | |
405 |
|
406 | |||
406 | if shortlog: |
|
407 | if shortlog: | |
407 | revcount = web.maxshortchanges |
|
408 | revcount = web.maxshortchanges | |
408 | else: |
|
409 | else: | |
409 | revcount = web.maxchanges |
|
410 | revcount = web.maxchanges | |
410 |
|
411 | |||
411 | if 'revcount' in web.req.qsparams: |
|
412 | if 'revcount' in web.req.qsparams: | |
412 | try: |
|
413 | try: | |
413 | revcount = int(web.req.qsparams.get('revcount', revcount)) |
|
414 | revcount = int(web.req.qsparams.get('revcount', revcount)) | |
414 | revcount = max(revcount, 1) |
|
415 | revcount = max(revcount, 1) | |
415 | web.tmpl.defaults['sessionvars']['revcount'] = revcount |
|
416 | web.tmpl.defaults['sessionvars']['revcount'] = revcount | |
416 | except ValueError: |
|
417 | except ValueError: | |
417 | pass |
|
418 | pass | |
418 |
|
419 | |||
419 | lessvars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
420 | lessvars = copy.copy(web.tmpl.defaults['sessionvars']) | |
420 | lessvars['revcount'] = max(revcount // 2, 1) |
|
421 | lessvars['revcount'] = max(revcount // 2, 1) | |
421 | morevars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
422 | morevars = copy.copy(web.tmpl.defaults['sessionvars']) | |
422 | morevars['revcount'] = revcount * 2 |
|
423 | morevars['revcount'] = revcount * 2 | |
423 |
|
424 | |||
424 | count = len(web.repo) |
|
425 | count = len(web.repo) | |
425 | pos = ctx.rev() |
|
426 | pos = ctx.rev() | |
426 | parity = paritygen(web.stripecount) |
|
427 | parity = paritygen(web.stripecount) | |
427 |
|
428 | |||
428 | changenav = webutil.revnav(web.repo).gen(pos, revcount, count) |
|
429 | changenav = webutil.revnav(web.repo).gen(pos, revcount, count) | |
429 |
|
430 | |||
430 | entries = list(changelist()) |
|
431 | entries = list(changelist()) | |
431 | latestentry = entries[:1] |
|
432 | latestentry = entries[:1] | |
432 | if len(entries) > revcount: |
|
433 | if len(entries) > revcount: | |
433 | nextentry = entries[-1:] |
|
434 | nextentry = entries[-1:] | |
434 | entries = entries[:-1] |
|
435 | entries = entries[:-1] | |
435 | else: |
|
436 | else: | |
436 | nextentry = [] |
|
437 | nextentry = [] | |
437 |
|
438 | |||
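A worked example (numbers invented) of the slicing above: with revcount = 2, ``changelist()`` stops after yielding revcount + 1 = 3 entries, and the surplus entry only feeds the next-page link:

    revcount = 2
    entries = ['rev10', 'rev9', 'rev8']   # what changelist() yielded
    latestentry = entries[:1]             # ['rev10']
    if len(entries) > revcount:
        nextentry = entries[-1:]          # ['rev8'], target of the "more" link
        entries = entries[:-1]            # ['rev10', 'rev9'] shown on this page
    else:
        nextentry = []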
438 | return web.sendtemplate( |
|
439 | return web.sendtemplate( | |
439 | 'shortlog' if shortlog else 'changelog', |
|
440 | 'shortlog' if shortlog else 'changelog', | |
440 | changenav=changenav, |
|
441 | changenav=changenav, | |
441 | node=ctx.hex(), |
|
442 | node=ctx.hex(), | |
442 | rev=pos, |
|
443 | rev=pos, | |
443 | symrev=symrev, |
|
444 | symrev=symrev, | |
444 | changesets=count, |
|
445 | changesets=count, | |
445 | entries=entries, |
|
446 | entries=entries, | |
446 | latestentry=latestentry, |
|
447 | latestentry=latestentry, | |
447 | nextentry=nextentry, |
|
448 | nextentry=nextentry, | |
448 | archives=web.archivelist('tip'), |
|
449 | archives=web.archivelist('tip'), | |
449 | revcount=revcount, |
|
450 | revcount=revcount, | |
450 | morevars=morevars, |
|
451 | morevars=morevars, | |
451 | lessvars=lessvars, |
|
452 | lessvars=lessvars, | |
452 | query=query) |
|
453 | query=query) | |
453 |
|
454 | |||
454 | @webcommand('shortlog') |
|
455 | @webcommand('shortlog') | |
455 | def shortlog(web): |
|
456 | def shortlog(web): | |
456 | """ |
|
457 | """ | |
457 | /shortlog |
|
458 | /shortlog | |
458 | --------- |
|
459 | --------- | |
459 |
|
460 | |||
460 | Show basic information about a set of changesets. |
|
461 | Show basic information about a set of changesets. | |
461 |
|
462 | |||
462 | This accepts the same parameters as the ``changelog`` handler. The only |
|
463 | This accepts the same parameters as the ``changelog`` handler. The only | |
463 | difference is the ``shortlog`` template will be rendered instead of the |
|
464 | difference is the ``shortlog`` template will be rendered instead of the | |
464 | ``changelog`` template. |
|
465 | ``changelog`` template. | |
465 | """ |
|
466 | """ | |
466 | return changelog(web, shortlog=True) |
|
467 | return changelog(web, shortlog=True) | |
467 |
|
468 | |||
468 | @webcommand('changeset') |
|
469 | @webcommand('changeset') | |
469 | def changeset(web): |
|
470 | def changeset(web): | |
470 | """ |
|
471 | """ | |
471 | /changeset[/{revision}] |
|
472 | /changeset[/{revision}] | |
472 | ----------------------- |
|
473 | ----------------------- | |
473 |
|
474 | |||
474 | Show information about a single changeset. |
|
475 | Show information about a single changeset. | |
475 |
|
476 | |||
476 | A URL path argument is the changeset identifier to show. See ``hg help |
|
477 | A URL path argument is the changeset identifier to show. See ``hg help | |
477 | revisions`` for possible values. If not defined, the ``tip`` changeset |
|
478 | revisions`` for possible values. If not defined, the ``tip`` changeset | |
478 | will be shown. |
|
479 | will be shown. | |
479 |
|
480 | |||
480 | The ``changeset`` template is rendered. Contents of the ``changesettag``, |
|
481 | The ``changeset`` template is rendered. Contents of the ``changesettag``, | |
481 | ``changesetbookmark``, ``filenodelink``, ``filenolink``, and the many |
|
482 | ``changesetbookmark``, ``filenodelink``, ``filenolink``, and the many | |
482 | templates related to diffs may all be used to produce the output. |
|
483 | templates related to diffs may all be used to produce the output. | |
483 | """ |
|
484 | """ | |
484 | ctx = webutil.changectx(web.repo, web.req) |
|
485 | ctx = webutil.changectx(web.repo, web.req) | |
485 |
|
486 | |||
486 | return web.sendtemplate( |
|
487 | return web.sendtemplate( | |
487 | 'changeset', |
|
488 | 'changeset', | |
488 | **webutil.changesetentry(web, ctx)) |
|
489 | **webutil.changesetentry(web, ctx)) | |
489 |
|
490 | |||
490 | rev = webcommand('rev')(changeset) |
|
491 | rev = webcommand('rev')(changeset) | |
491 |
|
492 | |||
492 | def decodepath(path): |
|
493 | def decodepath(path): | |
493 | """Hook for mapping a path in the repository to a path in the |
|
494 | """Hook for mapping a path in the repository to a path in the | |
494 | working copy. |
|
495 | working copy. | |
495 |
|
496 | |||
496 | Extensions (e.g., largefiles) can override this to remap files in |
|
497 | Extensions (e.g., largefiles) can override this to remap files in | |
497 | the virtual file system presented by the manifest command below.""" |
|
498 | the virtual file system presented by the manifest command below.""" | |
498 | return path |
|
499 | return path | |
499 |
|
500 | |||
500 | @webcommand('manifest') |
|
501 | @webcommand('manifest') | |
501 | def manifest(web): |
|
502 | def manifest(web): | |
502 | """ |
|
503 | """ | |
503 | /manifest[/{revision}[/{path}]] |
|
504 | /manifest[/{revision}[/{path}]] | |
504 | ------------------------------- |
|
505 | ------------------------------- | |
505 |
|
506 | |||
506 | Show information about a directory. |
|
507 | Show information about a directory. | |
507 |
|
508 | |||
508 | If the URL path arguments are omitted, information about the root |
|
509 | If the URL path arguments are omitted, information about the root | |
509 | directory for the ``tip`` changeset will be shown. |
|
510 | directory for the ``tip`` changeset will be shown. | |
510 |
|
511 | |||
511 | Because this handler can only show information for directories, it |
|
512 | Because this handler can only show information for directories, it | |
512 | is recommended to use the ``file`` handler instead, as it can handle both |
|
513 | is recommended to use the ``file`` handler instead, as it can handle both | |
513 | directories and files. |
|
514 | directories and files. | |
514 |
|
515 | |||
515 | The ``manifest`` template will be rendered for this handler. |
|
516 | The ``manifest`` template will be rendered for this handler. | |
516 | """ |
|
517 | """ | |
517 | if 'node' in web.req.qsparams: |
|
518 | if 'node' in web.req.qsparams: | |
518 | ctx = webutil.changectx(web.repo, web.req) |
|
519 | ctx = webutil.changectx(web.repo, web.req) | |
519 | symrev = webutil.symrevorshortnode(web.req, ctx) |
|
520 | symrev = webutil.symrevorshortnode(web.req, ctx) | |
520 | else: |
|
521 | else: | |
521 | ctx = web.repo['tip'] |
|
522 | ctx = web.repo['tip'] | |
522 | symrev = 'tip' |
|
523 | symrev = 'tip' | |
523 | path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', '')) |
|
524 | path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', '')) | |
524 | mf = ctx.manifest() |
|
525 | mf = ctx.manifest() | |
525 | node = ctx.node() |
|
526 | node = ctx.node() | |
526 |
|
527 | |||
527 | files = {} |
|
528 | files = {} | |
528 | dirs = {} |
|
529 | dirs = {} | |
529 | parity = paritygen(web.stripecount) |
|
530 | parity = paritygen(web.stripecount) | |
530 |
|
531 | |||
531 | if path and path[-1:] != "/": |
|
532 | if path and path[-1:] != "/": | |
532 | path += "/" |
|
533 | path += "/" | |
533 | l = len(path) |
|
534 | l = len(path) | |
534 | abspath = "/" + path |
|
535 | abspath = "/" + path | |
535 |
|
536 | |||
536 | for full, n in mf.iteritems(): |
|
537 | for full, n in mf.iteritems(): | |
537 | # the virtual path (working copy path) used for the full |
|
538 | # the virtual path (working copy path) used for the full | |
538 | # (repository) path |
|
539 | # (repository) path | |
539 | f = decodepath(full) |
|
540 | f = decodepath(full) | |
540 |
|
541 | |||
541 | if f[:l] != path: |
|
542 | if f[:l] != path: | |
542 | continue |
|
543 | continue | |
543 | remain = f[l:] |
|
544 | remain = f[l:] | |
544 | elements = remain.split('/') |
|
545 | elements = remain.split('/') | |
545 | if len(elements) == 1: |
|
546 | if len(elements) == 1: | |
546 | files[remain] = full |
|
547 | files[remain] = full | |
547 | else: |
|
548 | else: | |
548 | h = dirs # need to retain ref to dirs (root) |
|
549 | h = dirs # need to retain ref to dirs (root) | |
549 | for elem in elements[0:-1]: |
|
550 | for elem in elements[0:-1]: | |
550 | if elem not in h: |
|
551 | if elem not in h: | |
551 | h[elem] = {} |
|
552 | h[elem] = {} | |
552 | h = h[elem] |
|
553 | h = h[elem] | |
553 | if len(h) > 1: |
|
554 | if len(h) > 1: | |
554 | break |
|
555 | break | |
555 | h[None] = None # denotes files present |
|
556 | h[None] = None # denotes files present | |
556 |
|
557 | |||
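To see what the loop above produces, a self-contained sketch over an invented path list (the ``len(h) > 1`` early break, an optimization, is dropped here); ``None`` keys mark directories that directly contain files:

    def splitdirs(paths, prefix=''):
        files, dirs = {}, {}
        l = len(prefix)
        for f in paths:
            if f[:l] != prefix:
                continue
            elements = f[l:].split('/')
            if len(elements) == 1:
                files[elements[0]] = f
            else:
                h = dirs
                for elem in elements[:-1]:
                    h = h.setdefault(elem, {})
                h[None] = None   # files live directly below this directory
        return files, dirs

    # splitdirs(['README', 'src/main.py', 'src/util/io.py'])
    #   -> ({'README': 'README'},
    #       {'src': {None: None, 'util': {None: None}}})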
557 | if mf and not files and not dirs: |
|
558 | if mf and not files and not dirs: | |
558 | raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path) |
|
559 | raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path) | |
559 |
|
560 | |||
560 | def filelist(**map): |
|
561 | def filelist(**map): | |
561 | for f in sorted(files): |
|
562 | for f in sorted(files): | |
562 | full = files[f] |
|
563 | full = files[f] | |
563 |
|
564 | |||
564 | fctx = ctx.filectx(full) |
|
565 | fctx = ctx.filectx(full) | |
565 | yield {"file": full, |
|
566 | yield {"file": full, | |
566 | "parity": next(parity), |
|
567 | "parity": next(parity), | |
567 | "basename": f, |
|
568 | "basename": f, | |
568 | "date": fctx.date(), |
|
569 | "date": fctx.date(), | |
569 | "size": fctx.size(), |
|
570 | "size": fctx.size(), | |
570 | "permissions": mf.flags(full)} |
|
571 | "permissions": mf.flags(full)} | |
571 |
|
572 | |||
572 | def dirlist(**map): |
|
573 | def dirlist(**map): | |
573 | for d in sorted(dirs): |
|
574 | for d in sorted(dirs): | |
574 |
|
575 | |||
575 | emptydirs = [] |
|
576 | emptydirs = [] | |
576 | h = dirs[d] |
|
577 | h = dirs[d] | |
577 | while isinstance(h, dict) and len(h) == 1: |
|
578 | while isinstance(h, dict) and len(h) == 1: | |
578 | k, v = next(iter(h.items())) |
|
579 | k, v = next(iter(h.items())) | |
579 | if v: |
|
580 | if v: | |
580 | emptydirs.append(k) |
|
581 | emptydirs.append(k) | |
581 | h = v |
|
582 | h = v | |
582 |
|
583 | |||
583 | path = "%s%s" % (abspath, d) |
|
584 | path = "%s%s" % (abspath, d) | |
584 | yield {"parity": next(parity), |
|
585 | yield {"parity": next(parity), | |
585 | "path": path, |
|
586 | "path": path, | |
586 | "emptydirs": "/".join(emptydirs), |
|
587 | "emptydirs": "/".join(emptydirs), | |
587 | "basename": d} |
|
588 | "basename": d} | |
588 |
|
589 | |||
589 | return web.sendtemplate( |
|
590 | return web.sendtemplate( | |
590 | 'manifest', |
|
591 | 'manifest', | |
591 | symrev=symrev, |
|
592 | symrev=symrev, | |
592 | path=abspath, |
|
593 | path=abspath, | |
593 | up=webutil.up(abspath), |
|
594 | up=webutil.up(abspath), | |
594 | upparity=next(parity), |
|
595 | upparity=next(parity), | |
595 | fentries=filelist, |
|
596 | fentries=filelist, | |
596 | dentries=dirlist, |
|
597 | dentries=dirlist, | |
597 | archives=web.archivelist(hex(node)), |
|
598 | archives=web.archivelist(hex(node)), | |
598 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) |
|
599 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) | |
599 |
|
600 | |||
600 | @webcommand('tags') |
|
601 | @webcommand('tags') | |
601 | def tags(web): |
|
602 | def tags(web): | |
602 | """ |
|
603 | """ | |
603 | /tags |
|
604 | /tags | |
604 | ----- |
|
605 | ----- | |
605 |
|
606 | |||
606 | Show information about tags. |
|
607 | Show information about tags. | |
607 |
|
608 | |||
608 | No arguments are accepted. |
|
609 | No arguments are accepted. | |
609 |
|
610 | |||
610 | The ``tags`` template is rendered. |
|
611 | The ``tags`` template is rendered. | |
611 | """ |
|
612 | """ | |
612 | i = list(reversed(web.repo.tagslist())) |
|
613 | i = list(reversed(web.repo.tagslist())) | |
613 | parity = paritygen(web.stripecount) |
|
614 | parity = paritygen(web.stripecount) | |
614 |
|
615 | |||
615 | def entries(notip, latestonly, **map): |
|
616 | def entries(notip, latestonly, **map): | |
616 | t = i |
|
617 | t = i | |
617 | if notip: |
|
618 | if notip: | |
618 | t = [(k, n) for k, n in i if k != "tip"] |
|
619 | t = [(k, n) for k, n in i if k != "tip"] | |
619 | if latestonly: |
|
620 | if latestonly: | |
620 | t = t[:1] |
|
621 | t = t[:1] | |
621 | for k, n in t: |
|
622 | for k, n in t: | |
622 | yield {"parity": next(parity), |
|
623 | yield {"parity": next(parity), | |
623 | "tag": k, |
|
624 | "tag": k, | |
624 | "date": web.repo[n].date(), |
|
625 | "date": web.repo[n].date(), | |
625 | "node": hex(n)} |
|
626 | "node": hex(n)} | |
626 |
|
627 | |||
627 | return web.sendtemplate( |
|
628 | return web.sendtemplate( | |
628 | 'tags', |
|
629 | 'tags', | |
629 | node=hex(web.repo.changelog.tip()), |
|
630 | node=hex(web.repo.changelog.tip()), | |
630 | entries=lambda **x: entries(False, False, **x), |
|
631 | entries=lambda **x: entries(False, False, **x), | |
631 | entriesnotip=lambda **x: entries(True, False, **x), |
|
632 | entriesnotip=lambda **x: entries(True, False, **x), | |
632 | latestentry=lambda **x: entries(True, True, **x)) |
|
633 | latestentry=lambda **x: entries(True, True, **x)) | |
633 |
|
634 | |||
634 | @webcommand('bookmarks') |
|
635 | @webcommand('bookmarks') | |
635 | def bookmarks(web): |
|
636 | def bookmarks(web): | |
636 | """ |
|
637 | """ | |
637 | /bookmarks |
|
638 | /bookmarks | |
638 | ---------- |
|
639 | ---------- | |
639 |
|
640 | |||
640 | Show information about bookmarks. |
|
641 | Show information about bookmarks. | |
641 |
|
642 | |||
642 | No arguments are accepted. |
|
643 | No arguments are accepted. | |
643 |
|
644 | |||
644 | The ``bookmarks`` template is rendered. |
|
645 | The ``bookmarks`` template is rendered. | |
645 | """ |
|
646 | """ | |
646 | i = [b for b in web.repo._bookmarks.items() if b[1] in web.repo] |
|
647 | i = [b for b in web.repo._bookmarks.items() if b[1] in web.repo] | |
647 | sortkey = lambda b: (web.repo[b[1]].rev(), b[0]) |
|
648 | sortkey = lambda b: (web.repo[b[1]].rev(), b[0]) | |
648 | i = sorted(i, key=sortkey, reverse=True) |
|
649 | i = sorted(i, key=sortkey, reverse=True) | |
649 | parity = paritygen(web.stripecount) |
|
650 | parity = paritygen(web.stripecount) | |
650 |
|
651 | |||
651 | def entries(latestonly, **map): |
|
652 | def entries(latestonly, **map): | |
652 | t = i |
|
653 | t = i | |
653 | if latestonly: |
|
654 | if latestonly: | |
654 | t = i[:1] |
|
655 | t = i[:1] | |
655 | for k, n in t: |
|
656 | for k, n in t: | |
656 | yield {"parity": next(parity), |
|
657 | yield {"parity": next(parity), | |
657 | "bookmark": k, |
|
658 | "bookmark": k, | |
658 | "date": web.repo[n].date(), |
|
659 | "date": web.repo[n].date(), | |
659 | "node": hex(n)} |
|
660 | "node": hex(n)} | |
660 |
|
661 | |||
661 | if i: |
|
662 | if i: | |
662 | latestrev = i[0][1] |
|
663 | latestrev = i[0][1] | |
663 | else: |
|
664 | else: | |
664 | latestrev = -1 |
|
665 | latestrev = -1 | |
665 |
|
666 | |||
666 | return web.sendtemplate( |
|
667 | return web.sendtemplate( | |
667 | 'bookmarks', |
|
668 | 'bookmarks', | |
668 | node=hex(web.repo.changelog.tip()), |
|
669 | node=hex(web.repo.changelog.tip()), | |
669 | lastchange=[{'date': web.repo[latestrev].date()}], |
|
670 | lastchange=[{'date': web.repo[latestrev].date()}], | |
670 | entries=lambda **x: entries(latestonly=False, **x), |
|
671 | entries=lambda **x: entries(latestonly=False, **x), | |
671 | latestentry=lambda **x: entries(latestonly=True, **x)) |
|
672 | latestentry=lambda **x: entries(latestonly=True, **x)) | |
672 |
|
673 | |||
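
An illustrative sketch of the sort in the bookmarks handler above: entries are ordered by (revision, bookmark name), newest revision first. The data is invented; the real code derives the revision from each bookmark's node via web.repo[node].rev().

    marks = [('feature-x', 12), ('stable', 40), ('default-tip', 40)]   # (name, rev)
    marks.sort(key=lambda b: (b[1], b[0]), reverse=True)
    # -> [('stable', 40), ('default-tip', 40), ('feature-x', 12)]
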
673 | @webcommand('branches') |
|
674 | @webcommand('branches') | |
674 | def branches(web): |
|
675 | def branches(web): | |
675 | """ |
|
676 | """ | |
676 | /branches |
|
677 | /branches | |
677 | --------- |
|
678 | --------- | |
678 |
|
679 | |||
679 | Show information about branches. |
|
680 | Show information about branches. | |
680 |
|
681 | |||
681 | All known branches are contained in the output, even closed branches. |
|
682 | All known branches are contained in the output, even closed branches. | |
682 |
|
683 | |||
683 | No arguments are accepted. |
|
684 | No arguments are accepted. | |
684 |
|
685 | |||
685 | The ``branches`` template is rendered. |
|
686 | The ``branches`` template is rendered. | |
686 | """ |
|
687 | """ | |
687 | entries = webutil.branchentries(web.repo, web.stripecount) |
|
688 | entries = webutil.branchentries(web.repo, web.stripecount) | |
688 | latestentry = webutil.branchentries(web.repo, web.stripecount, 1) |
|
689 | latestentry = webutil.branchentries(web.repo, web.stripecount, 1) | |
689 |
|
690 | |||
690 | return web.sendtemplate( |
|
691 | return web.sendtemplate( | |
691 | 'branches', |
|
692 | 'branches', | |
692 | node=hex(web.repo.changelog.tip()), |
|
693 | node=hex(web.repo.changelog.tip()), | |
693 | entries=entries, |
|
694 | entries=entries, | |
694 | latestentry=latestentry) |
|
695 | latestentry=latestentry) | |
695 |
|
696 | |||
696 | @webcommand('summary') |
|
697 | @webcommand('summary') | |
697 | def summary(web): |
|
698 | def summary(web): | |
698 | """ |
|
699 | """ | |
699 | /summary |
|
700 | /summary | |
700 | -------- |
|
701 | -------- | |
701 |
|
702 | |||
702 | Show a summary of repository state. |
|
703 | Show a summary of repository state. | |
703 |
|
704 | |||
704 | Information about the latest changesets, bookmarks, tags, and branches |
|
705 | Information about the latest changesets, bookmarks, tags, and branches | |
705 | is captured by this handler. |
|
706 | is captured by this handler. | |
706 |
|
707 | |||
707 | The ``summary`` template is rendered. |
|
708 | The ``summary`` template is rendered. | |
708 | """ |
|
709 | """ | |
709 | i = reversed(web.repo.tagslist()) |
|
710 | i = reversed(web.repo.tagslist()) | |
710 |
|
711 | |||
711 | def tagentries(**map): |
|
712 | def tagentries(**map): | |
712 | parity = paritygen(web.stripecount) |
|
713 | parity = paritygen(web.stripecount) | |
713 | count = 0 |
|
714 | count = 0 | |
714 | for k, n in i: |
|
715 | for k, n in i: | |
715 | if k == "tip": # skip tip |
|
716 | if k == "tip": # skip tip | |
716 | continue |
|
717 | continue | |
717 |
|
718 | |||
718 | count += 1 |
|
719 | count += 1 | |
719 | if count > 10: # limit to 10 tags |
|
720 | if count > 10: # limit to 10 tags | |
720 | break |
|
721 | break | |
721 |
|
722 | |||
722 | yield web.tmpl( |
|
723 | yield web.tmpl.generate('tagentry', { | |
723 | 'tagentry', |
|
724 | 'parity': next(parity), |
724 | parity=next(parity), |
|
725 | 'tag': k, |
725 | tag=k, |
|
726 | 'node': hex(n), |
726 | node=hex(n), |
|
727 | 'date': web.repo[n].date(), | |
727 | date=web.repo[n].date()) |
|
728 | }) | |
728 |
|
729 | |||
729 | def bookmarks(**map): |
|
730 | def bookmarks(**map): | |
730 | parity = paritygen(web.stripecount) |
|
731 | parity = paritygen(web.stripecount) | |
731 | marks = [b for b in web.repo._bookmarks.items() if b[1] in web.repo] |
|
732 | marks = [b for b in web.repo._bookmarks.items() if b[1] in web.repo] | |
732 | sortkey = lambda b: (web.repo[b[1]].rev(), b[0]) |
|
733 | sortkey = lambda b: (web.repo[b[1]].rev(), b[0]) | |
733 | marks = sorted(marks, key=sortkey, reverse=True) |
|
734 | marks = sorted(marks, key=sortkey, reverse=True) | |
734 | for k, n in marks[:10]: # limit to 10 bookmarks |
|
735 | for k, n in marks[:10]: # limit to 10 bookmarks | |
735 | yield {'parity': next(parity), |
|
736 | yield {'parity': next(parity), | |
736 | 'bookmark': k, |
|
737 | 'bookmark': k, | |
737 | 'date': web.repo[n].date(), |
|
738 | 'date': web.repo[n].date(), | |
738 | 'node': hex(n)} |
|
739 | 'node': hex(n)} | |
739 |
|
740 | |||
740 | def changelist(**map): |
|
741 | def changelist(**map): | |
741 | parity = paritygen(web.stripecount, offset=start - end) |
|
742 | parity = paritygen(web.stripecount, offset=start - end) | |
742 | l = [] # build a list in forward order for efficiency |
|
743 | l = [] # build a list in forward order for efficiency | |
743 | revs = [] |
|
744 | revs = [] | |
744 | if start < end: |
|
745 | if start < end: | |
745 | revs = web.repo.changelog.revs(start, end - 1) |
|
746 | revs = web.repo.changelog.revs(start, end - 1) | |
746 | for i in revs: |
|
747 | for i in revs: | |
747 | ctx = web.repo[i] |
|
748 | ctx = web.repo[i] | |
748 |
|
749 | lm = webutil.commonentry(web.repo, ctx) | ||
749 | l.append(web.tmpl( |
|
750 | lm['parity'] = next(parity) | |
750 | 'shortlogentry', |
|
751 | l.append(web.tmpl.generate('shortlogentry', lm)) | |
751 | parity=next(parity), |
|
|||
752 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx)))) |
|
|||
753 |
|
752 | |||
754 | for entry in reversed(l): |
|
753 | for entry in reversed(l): | |
755 | yield entry |
|
754 | yield entry | |
756 |
|
755 | |||
757 | tip = web.repo['tip'] |
|
756 | tip = web.repo['tip'] | |
758 | count = len(web.repo) |
|
757 | count = len(web.repo) | |
759 | start = max(0, count - web.maxchanges) |
|
758 | start = max(0, count - web.maxchanges) | |
760 | end = min(count, start + web.maxchanges) |
|
759 | end = min(count, start + web.maxchanges) | |
761 |
|
760 | |||
762 | desc = web.config("web", "description") |
|
761 | desc = web.config("web", "description") | |
763 | if not desc: |
|
762 | if not desc: | |
764 | desc = 'unknown' |
|
763 | desc = 'unknown' | |
765 |
|
764 | |||
766 | return web.sendtemplate( |
|
765 | return web.sendtemplate( | |
767 | 'summary', |
|
766 | 'summary', | |
768 | desc=desc, |
|
767 | desc=desc, | |
769 | owner=get_contact(web.config) or 'unknown', |
|
768 | owner=get_contact(web.config) or 'unknown', | |
770 | lastchange=tip.date(), |
|
769 | lastchange=tip.date(), | |
771 | tags=tagentries, |
|
770 | tags=tagentries, | |
772 | bookmarks=bookmarks, |
|
771 | bookmarks=bookmarks, | |
773 | branches=webutil.branchentries(web.repo, web.stripecount, 10), |
|
772 | branches=webutil.branchentries(web.repo, web.stripecount, 10), | |
774 | shortlog=changelist, |
|
773 | shortlog=changelist, | |
775 | node=tip.hex(), |
|
774 | node=tip.hex(), | |
776 | symrev='tip', |
|
775 | symrev='tip', | |
777 | archives=web.archivelist('tip'), |
|
776 | archives=web.archivelist('tip'), | |
778 | labels=web.configlist('web', 'labels')) |
|
777 | labels=web.configlist('web', 'labels')) | |
779 |
|
778 | |||
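
The shortlog shown by the summary handler above is limited by a simple arithmetic window. A worked example with assumed numbers:

    # Assumes 1200 changesets and web.maxchanges == 60 (illustrative values only).
    count, maxchanges = 1200, 60
    start = max(0, count - maxchanges)     # 1140
    end = min(count, start + maxchanges)   # 1200; revs(start, end - 1) covers 1140..1199
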
780 | @webcommand('filediff') |
|
779 | @webcommand('filediff') | |
781 | def filediff(web): |
|
780 | def filediff(web): | |
782 | """ |
|
781 | """ | |
783 | /diff/{revision}/{path} |
|
782 | /diff/{revision}/{path} | |
784 | ----------------------- |
|
783 | ----------------------- | |
785 |
|
784 | |||
786 | Show how a file changed in a particular commit. |
|
785 | Show how a file changed in a particular commit. | |
787 |
|
786 | |||
788 | The ``filediff`` template is rendered. |
|
787 | The ``filediff`` template is rendered. | |
789 |
|
788 | |||
790 | This handler is registered under both the ``/diff`` and ``/filediff`` |
|
789 | This handler is registered under both the ``/diff`` and ``/filediff`` | |
791 | paths. ``/diff`` is used in modern code. |
|
790 | paths. ``/diff`` is used in modern code. | |
792 | """ |
|
791 | """ | |
793 | fctx, ctx = None, None |
|
792 | fctx, ctx = None, None | |
794 | try: |
|
793 | try: | |
795 | fctx = webutil.filectx(web.repo, web.req) |
|
794 | fctx = webutil.filectx(web.repo, web.req) | |
796 | except LookupError: |
|
795 | except LookupError: | |
797 | ctx = webutil.changectx(web.repo, web.req) |
|
796 | ctx = webutil.changectx(web.repo, web.req) | |
798 | path = webutil.cleanpath(web.repo, web.req.qsparams['file']) |
|
797 | path = webutil.cleanpath(web.repo, web.req.qsparams['file']) | |
799 | if path not in ctx.files(): |
|
798 | if path not in ctx.files(): | |
800 | raise |
|
799 | raise | |
801 |
|
800 | |||
802 | if fctx is not None: |
|
801 | if fctx is not None: | |
803 | path = fctx.path() |
|
802 | path = fctx.path() | |
804 | ctx = fctx.changectx() |
|
803 | ctx = fctx.changectx() | |
805 | basectx = ctx.p1() |
|
804 | basectx = ctx.p1() | |
806 |
|
805 | |||
807 | style = web.config('web', 'style') |
|
806 | style = web.config('web', 'style') | |
808 | if 'style' in web.req.qsparams: |
|
807 | if 'style' in web.req.qsparams: | |
809 | style = web.req.qsparams['style'] |
|
808 | style = web.req.qsparams['style'] | |
810 |
|
809 | |||
811 | diffs = webutil.diffs(web, ctx, basectx, [path], style) |
|
810 | diffs = webutil.diffs(web, ctx, basectx, [path], style) | |
812 | if fctx is not None: |
|
811 | if fctx is not None: | |
813 | rename = webutil.renamelink(fctx) |
|
812 | rename = webutil.renamelink(fctx) | |
814 | ctx = fctx |
|
813 | ctx = fctx | |
815 | else: |
|
814 | else: | |
816 | rename = [] |
|
815 | rename = [] | |
817 | ctx = ctx |
|
816 | ctx = ctx | |
818 |
|
817 | |||
819 | return web.sendtemplate( |
|
818 | return web.sendtemplate( | |
820 | 'filediff', |
|
819 | 'filediff', | |
821 | file=path, |
|
820 | file=path, | |
822 | symrev=webutil.symrevorshortnode(web.req, ctx), |
|
821 | symrev=webutil.symrevorshortnode(web.req, ctx), | |
823 | rename=rename, |
|
822 | rename=rename, | |
824 | diff=diffs, |
|
823 | diff=diffs, | |
825 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) |
|
824 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) | |
826 |
|
825 | |||
827 | diff = webcommand('diff')(filediff) |
|
826 | diff = webcommand('diff')(filediff) | |
828 |
|
827 | |||
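
Because of the diff = webcommand('diff')(filediff) alias above, the same handler answers both URL forms. A hypothetical Python 3 client sketch; the host, port and file name are assumptions, not taken from the patch:

    import urllib.request

    for url in ('http://localhost:8000/diff/tip/README',
                'http://localhost:8000/filediff/tip/README'):
        with urllib.request.urlopen(url) as resp:
            print(url, resp.status)   # both render the filediff template
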
829 | @webcommand('comparison') |
|
828 | @webcommand('comparison') | |
830 | def comparison(web): |
|
829 | def comparison(web): | |
831 | """ |
|
830 | """ | |
832 | /comparison/{revision}/{path} |
|
831 | /comparison/{revision}/{path} | |
833 | ----------------------------- |
|
832 | ----------------------------- | |
834 |
|
833 | |||
835 | Show a comparison between the old and new versions of a file from changes |
|
834 | Show a comparison between the old and new versions of a file from changes | |
836 | made on a particular revision. |
|
835 | made on a particular revision. | |
837 |
|
836 | |||
838 | This is similar to the ``diff`` handler. However, this form features |
|
837 | This is similar to the ``diff`` handler. However, this form features | |
839 | a split or side-by-side diff rather than a unified diff. |
|
838 | a split or side-by-side diff rather than a unified diff. | |
840 |
|
839 | |||
841 | The ``context`` query string argument can be used to control the lines of |
|
840 | The ``context`` query string argument can be used to control the lines of | |
842 | context in the diff. |
|
841 | context in the diff. | |
843 |
|
842 | |||
844 | The ``filecomparison`` template is rendered. |
|
843 | The ``filecomparison`` template is rendered. | |
845 | """ |
|
844 | """ | |
846 | ctx = webutil.changectx(web.repo, web.req) |
|
845 | ctx = webutil.changectx(web.repo, web.req) | |
847 | if 'file' not in web.req.qsparams: |
|
846 | if 'file' not in web.req.qsparams: | |
848 | raise ErrorResponse(HTTP_NOT_FOUND, 'file not given') |
|
847 | raise ErrorResponse(HTTP_NOT_FOUND, 'file not given') | |
849 | path = webutil.cleanpath(web.repo, web.req.qsparams['file']) |
|
848 | path = webutil.cleanpath(web.repo, web.req.qsparams['file']) | |
850 |
|
849 | |||
851 | parsecontext = lambda v: v == 'full' and -1 or int(v) |
|
850 | parsecontext = lambda v: v == 'full' and -1 or int(v) | |
852 | if 'context' in web.req.qsparams: |
|
851 | if 'context' in web.req.qsparams: | |
853 | context = parsecontext(web.req.qsparams['context']) |
|
852 | context = parsecontext(web.req.qsparams['context']) | |
854 | else: |
|
853 | else: | |
855 | context = parsecontext(web.config('web', 'comparisoncontext', '5')) |
|
854 | context = parsecontext(web.config('web', 'comparisoncontext', '5')) | |
856 |
|
855 | |||
857 | def filelines(f): |
|
856 | def filelines(f): | |
858 | if f.isbinary(): |
|
857 | if f.isbinary(): | |
859 | mt = mimetypes.guess_type(f.path())[0] |
|
858 | mt = mimetypes.guess_type(f.path())[0] | |
860 | if not mt: |
|
859 | if not mt: | |
861 | mt = 'application/octet-stream' |
|
860 | mt = 'application/octet-stream' | |
862 | return [_('(binary file %s, hash: %s)') % (mt, hex(f.filenode()))] |
|
861 | return [_('(binary file %s, hash: %s)') % (mt, hex(f.filenode()))] | |
863 | return f.data().splitlines() |
|
862 | return f.data().splitlines() | |
864 |
|
863 | |||
865 | fctx = None |
|
864 | fctx = None | |
866 | parent = ctx.p1() |
|
865 | parent = ctx.p1() | |
867 | leftrev = parent.rev() |
|
866 | leftrev = parent.rev() | |
868 | leftnode = parent.node() |
|
867 | leftnode = parent.node() | |
869 | rightrev = ctx.rev() |
|
868 | rightrev = ctx.rev() | |
870 | rightnode = ctx.node() |
|
869 | rightnode = ctx.node() | |
871 | if path in ctx: |
|
870 | if path in ctx: | |
872 | fctx = ctx[path] |
|
871 | fctx = ctx[path] | |
873 | rightlines = filelines(fctx) |
|
872 | rightlines = filelines(fctx) | |
874 | if path not in parent: |
|
873 | if path not in parent: | |
875 | leftlines = () |
|
874 | leftlines = () | |
876 | else: |
|
875 | else: | |
877 | pfctx = parent[path] |
|
876 | pfctx = parent[path] | |
878 | leftlines = filelines(pfctx) |
|
877 | leftlines = filelines(pfctx) | |
879 | else: |
|
878 | else: | |
880 | rightlines = () |
|
879 | rightlines = () | |
881 | pfctx = ctx.parents()[0][path] |
|
880 | pfctx = ctx.parents()[0][path] | |
882 | leftlines = filelines(pfctx) |
|
881 | leftlines = filelines(pfctx) | |
883 |
|
882 | |||
884 | comparison = webutil.compare(web.tmpl, context, leftlines, rightlines) |
|
883 | comparison = webutil.compare(web.tmpl, context, leftlines, rightlines) | |
885 | if fctx is not None: |
|
884 | if fctx is not None: | |
886 | rename = webutil.renamelink(fctx) |
|
885 | rename = webutil.renamelink(fctx) | |
887 | ctx = fctx |
|
886 | ctx = fctx | |
888 | else: |
|
887 | else: | |
889 | rename = [] |
|
888 | rename = [] | |
890 | ctx = ctx |
|
889 | ctx = ctx | |
891 |
|
890 | |||
892 | return web.sendtemplate( |
|
891 | return web.sendtemplate( | |
893 | 'filecomparison', |
|
892 | 'filecomparison', | |
894 | file=path, |
|
893 | file=path, | |
895 | symrev=webutil.symrevorshortnode(web.req, ctx), |
|
894 | symrev=webutil.symrevorshortnode(web.req, ctx), | |
896 | rename=rename, |
|
895 | rename=rename, | |
897 | leftrev=leftrev, |
|
896 | leftrev=leftrev, | |
898 | leftnode=hex(leftnode), |
|
897 | leftnode=hex(leftnode), | |
899 | rightrev=rightrev, |
|
898 | rightrev=rightrev, | |
900 | rightnode=hex(rightnode), |
|
899 | rightnode=hex(rightnode), | |
901 | comparison=comparison, |
|
900 | comparison=comparison, | |
902 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) |
|
901 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) | |
903 |
|
902 | |||
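
The context parsing in the comparison handler above uses the classic and/or idiom; it is safe here only because -1 is truthy. A small sketch:

    parsecontext = lambda v: v == 'full' and -1 or int(v)
    assert parsecontext('full') == -1   # 'full' means unlimited context
    assert parsecontext('5') == 5       # anything else must parse as an integer
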
904 | @webcommand('annotate') |
|
903 | @webcommand('annotate') | |
905 | def annotate(web): |
|
904 | def annotate(web): | |
906 | """ |
|
905 | """ | |
907 | /annotate/{revision}/{path} |
|
906 | /annotate/{revision}/{path} | |
908 | --------------------------- |
|
907 | --------------------------- | |
909 |
|
908 | |||
910 | Show changeset information for each line in a file. |
|
909 | Show changeset information for each line in a file. | |
911 |
|
910 | |||
912 | The ``ignorews``, ``ignorewsamount``, ``ignorewseol``, and |
|
911 | The ``ignorews``, ``ignorewsamount``, ``ignorewseol``, and | |
913 | ``ignoreblanklines`` query string arguments have the same meaning as |
|
912 | ``ignoreblanklines`` query string arguments have the same meaning as | |
914 | their ``[annotate]`` config equivalents. It uses the hgrc boolean |
|
913 | their ``[annotate]`` config equivalents. It uses the hgrc boolean | |
915 | parsing logic to interpret the value. e.g. ``0`` and ``false`` are |
|
914 | parsing logic to interpret the value. e.g. ``0`` and ``false`` are | |
916 | false and ``1`` and ``true`` are true. If not defined, the server |
|
915 | false and ``1`` and ``true`` are true. If not defined, the server | |
917 | default settings are used. |
|
916 | default settings are used. | |
918 |
|
917 | |||
919 | The ``fileannotate`` template is rendered. |
|
918 | The ``fileannotate`` template is rendered. | |
920 | """ |
|
919 | """ | |
921 | fctx = webutil.filectx(web.repo, web.req) |
|
920 | fctx = webutil.filectx(web.repo, web.req) | |
922 | f = fctx.path() |
|
921 | f = fctx.path() | |
923 | parity = paritygen(web.stripecount) |
|
922 | parity = paritygen(web.stripecount) | |
924 | ishead = fctx.filerev() in fctx.filelog().headrevs() |
|
923 | ishead = fctx.filerev() in fctx.filelog().headrevs() | |
925 |
|
924 | |||
926 | # parents() is called once per line and several lines likely belong to |
|
925 | # parents() is called once per line and several lines likely belong to | |
927 | # same revision. So it is worth caching. |
|
926 | # same revision. So it is worth caching. | |
928 | # TODO there are still redundant operations within basefilectx.parents() |
|
927 | # TODO there are still redundant operations within basefilectx.parents() | |
929 | # and from the fctx.annotate() call itself that could be cached. |
|
928 | # and from the fctx.annotate() call itself that could be cached. | |
930 | parentscache = {} |
|
929 | parentscache = {} | |
931 | def parents(f): |
|
930 | def parents(f): | |
932 | rev = f.rev() |
|
931 | rev = f.rev() | |
933 | if rev not in parentscache: |
|
932 | if rev not in parentscache: | |
934 | parentscache[rev] = [] |
|
933 | parentscache[rev] = [] | |
935 | for p in f.parents(): |
|
934 | for p in f.parents(): | |
936 | entry = { |
|
935 | entry = { | |
937 | 'node': p.hex(), |
|
936 | 'node': p.hex(), | |
938 | 'rev': p.rev(), |
|
937 | 'rev': p.rev(), | |
939 | } |
|
938 | } | |
940 | parentscache[rev].append(entry) |
|
939 | parentscache[rev].append(entry) | |
941 |
|
940 | |||
942 | for p in parentscache[rev]: |
|
941 | for p in parentscache[rev]: | |
943 | yield p |
|
942 | yield p | |
944 |
|
943 | |||
945 | def annotate(**map): |
|
944 | def annotate(**map): | |
946 | if fctx.isbinary(): |
|
945 | if fctx.isbinary(): | |
947 | mt = (mimetypes.guess_type(fctx.path())[0] |
|
946 | mt = (mimetypes.guess_type(fctx.path())[0] | |
948 | or 'application/octet-stream') |
|
947 | or 'application/octet-stream') | |
949 | lines = [((fctx.filectx(fctx.filerev()), 1), '(binary:%s)' % mt)] |
|
948 | lines = [((fctx.filectx(fctx.filerev()), 1), '(binary:%s)' % mt)] | |
950 | else: |
|
949 | else: | |
951 | lines = webutil.annotate(web.req, fctx, web.repo.ui) |
|
950 | lines = webutil.annotate(web.req, fctx, web.repo.ui) | |
952 |
|
951 | |||
953 | previousrev = None |
|
952 | previousrev = None | |
954 | blockparitygen = paritygen(1) |
|
953 | blockparitygen = paritygen(1) | |
955 | for lineno, (aline, l) in enumerate(lines): |
|
954 | for lineno, (aline, l) in enumerate(lines): | |
956 | f = aline.fctx |
|
955 | f = aline.fctx | |
957 | rev = f.rev() |
|
956 | rev = f.rev() | |
958 | if rev != previousrev: |
|
957 | if rev != previousrev: | |
959 | blockhead = True |
|
958 | blockhead = True | |
960 | blockparity = next(blockparitygen) |
|
959 | blockparity = next(blockparitygen) | |
961 | else: |
|
960 | else: | |
962 | blockhead = None |
|
961 | blockhead = None | |
963 | previousrev = rev |
|
962 | previousrev = rev | |
964 | yield {"parity": next(parity), |
|
963 | yield {"parity": next(parity), | |
965 | "node": f.hex(), |
|
964 | "node": f.hex(), | |
966 | "rev": rev, |
|
965 | "rev": rev, | |
967 | "author": f.user(), |
|
966 | "author": f.user(), | |
968 | "parents": parents(f), |
|
967 | "parents": parents(f), | |
969 | "desc": f.description(), |
|
968 | "desc": f.description(), | |
970 | "extra": f.extra(), |
|
969 | "extra": f.extra(), | |
971 | "file": f.path(), |
|
970 | "file": f.path(), | |
972 | "blockhead": blockhead, |
|
971 | "blockhead": blockhead, | |
973 | "blockparity": blockparity, |
|
972 | "blockparity": blockparity, | |
974 | "targetline": aline.lineno, |
|
973 | "targetline": aline.lineno, | |
975 | "line": l, |
|
974 | "line": l, | |
976 | "lineno": lineno + 1, |
|
975 | "lineno": lineno + 1, | |
977 | "lineid": "l%d" % (lineno + 1), |
|
976 | "lineid": "l%d" % (lineno + 1), | |
978 | "linenumber": "% 6d" % (lineno + 1), |
|
977 | "linenumber": "% 6d" % (lineno + 1), | |
979 | "revdate": f.date()} |
|
978 | "revdate": f.date()} | |
980 |
|
979 | |||
981 | diffopts = webutil.difffeatureopts(web.req, web.repo.ui, 'annotate') |
|
980 | diffopts = webutil.difffeatureopts(web.req, web.repo.ui, 'annotate') | |
982 | diffopts = {k: getattr(diffopts, k) for k in diffopts.defaults} |
|
981 | diffopts = {k: getattr(diffopts, k) for k in diffopts.defaults} | |
983 |
|
982 | |||
984 | return web.sendtemplate( |
|
983 | return web.sendtemplate( | |
985 | 'fileannotate', |
|
984 | 'fileannotate', | |
986 | file=f, |
|
985 | file=f, | |
987 | annotate=annotate, |
|
986 | annotate=annotate, | |
988 | path=webutil.up(f), |
|
987 | path=webutil.up(f), | |
989 | symrev=webutil.symrevorshortnode(web.req, fctx), |
|
988 | symrev=webutil.symrevorshortnode(web.req, fctx), | |
990 | rename=webutil.renamelink(fctx), |
|
989 | rename=webutil.renamelink(fctx), | |
991 | permissions=fctx.manifest().flags(f), |
|
990 | permissions=fctx.manifest().flags(f), | |
992 | ishead=int(ishead), |
|
991 | ishead=int(ishead), | |
993 | diffopts=diffopts, |
|
992 | diffopts=diffopts, | |
994 | **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))) |
|
993 | **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))) | |
995 |
|
994 | |||
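
An illustrative sketch of the block grouping inside annotate() above: a new block starts (and the block parity flips) whenever the annotated revision differs from the previous line's revision. The revision sequence is made up.

    revs = [3, 3, 5, 5, 5, 2]
    previous, blockheads = None, []
    for rev in revs:
        blockheads.append(rev != previous)   # True marks the first line of a block
        previous = rev
    # blockheads == [True, False, True, False, False, True]
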
996 | @webcommand('filelog') |
|
995 | @webcommand('filelog') | |
997 | def filelog(web): |
|
996 | def filelog(web): | |
998 | """ |
|
997 | """ | |
999 | /filelog/{revision}/{path} |
|
998 | /filelog/{revision}/{path} | |
1000 | -------------------------- |
|
999 | -------------------------- | |
1001 |
|
1000 | |||
1002 | Show information about the history of a file in the repository. |
|
1001 | Show information about the history of a file in the repository. | |
1003 |
|
1002 | |||
1004 | The ``revcount`` query string argument can be defined to control the |
|
1003 | The ``revcount`` query string argument can be defined to control the | |
1005 | maximum number of entries to show. |
|
1004 | maximum number of entries to show. | |
1006 |
|
1005 | |||
1007 | The ``filelog`` template will be rendered. |
|
1006 | The ``filelog`` template will be rendered. | |
1008 | """ |
|
1007 | """ | |
1009 |
|
1008 | |||
1010 | try: |
|
1009 | try: | |
1011 | fctx = webutil.filectx(web.repo, web.req) |
|
1010 | fctx = webutil.filectx(web.repo, web.req) | |
1012 | f = fctx.path() |
|
1011 | f = fctx.path() | |
1013 | fl = fctx.filelog() |
|
1012 | fl = fctx.filelog() | |
1014 | except error.LookupError: |
|
1013 | except error.LookupError: | |
1015 | f = webutil.cleanpath(web.repo, web.req.qsparams['file']) |
|
1014 | f = webutil.cleanpath(web.repo, web.req.qsparams['file']) | |
1016 | fl = web.repo.file(f) |
|
1015 | fl = web.repo.file(f) | |
1017 | numrevs = len(fl) |
|
1016 | numrevs = len(fl) | |
1018 | if not numrevs: # file doesn't exist at all |
|
1017 | if not numrevs: # file doesn't exist at all | |
1019 | raise |
|
1018 | raise | |
1020 | rev = webutil.changectx(web.repo, web.req).rev() |
|
1019 | rev = webutil.changectx(web.repo, web.req).rev() | |
1021 | first = fl.linkrev(0) |
|
1020 | first = fl.linkrev(0) | |
1022 | if rev < first: # current rev is from before file existed |
|
1021 | if rev < first: # current rev is from before file existed | |
1023 | raise |
|
1022 | raise | |
1024 | frev = numrevs - 1 |
|
1023 | frev = numrevs - 1 | |
1025 | while fl.linkrev(frev) > rev: |
|
1024 | while fl.linkrev(frev) > rev: | |
1026 | frev -= 1 |
|
1025 | frev -= 1 | |
1027 | fctx = web.repo.filectx(f, fl.linkrev(frev)) |
|
1026 | fctx = web.repo.filectx(f, fl.linkrev(frev)) | |
1028 |
|
1027 | |||
1029 | revcount = web.maxshortchanges |
|
1028 | revcount = web.maxshortchanges | |
1030 | if 'revcount' in web.req.qsparams: |
|
1029 | if 'revcount' in web.req.qsparams: | |
1031 | try: |
|
1030 | try: | |
1032 | revcount = int(web.req.qsparams.get('revcount', revcount)) |
|
1031 | revcount = int(web.req.qsparams.get('revcount', revcount)) | |
1033 | revcount = max(revcount, 1) |
|
1032 | revcount = max(revcount, 1) | |
1034 | web.tmpl.defaults['sessionvars']['revcount'] = revcount |
|
1033 | web.tmpl.defaults['sessionvars']['revcount'] = revcount | |
1035 | except ValueError: |
|
1034 | except ValueError: | |
1036 | pass |
|
1035 | pass | |
1037 |
|
1036 | |||
1038 | lrange = webutil.linerange(web.req) |
|
1037 | lrange = webutil.linerange(web.req) | |
1039 |
|
1038 | |||
1040 | lessvars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
1039 | lessvars = copy.copy(web.tmpl.defaults['sessionvars']) | |
1041 | lessvars['revcount'] = max(revcount // 2, 1) |
|
1040 | lessvars['revcount'] = max(revcount // 2, 1) | |
1042 | morevars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
1041 | morevars = copy.copy(web.tmpl.defaults['sessionvars']) | |
1043 | morevars['revcount'] = revcount * 2 |
|
1042 | morevars['revcount'] = revcount * 2 | |
1044 |
|
1043 | |||
1045 | patch = 'patch' in web.req.qsparams |
|
1044 | patch = 'patch' in web.req.qsparams | |
1046 | if patch: |
|
1045 | if patch: | |
1047 | lessvars['patch'] = morevars['patch'] = web.req.qsparams['patch'] |
|
1046 | lessvars['patch'] = morevars['patch'] = web.req.qsparams['patch'] | |
1048 | descend = 'descend' in web.req.qsparams |
|
1047 | descend = 'descend' in web.req.qsparams | |
1049 | if descend: |
|
1048 | if descend: | |
1050 | lessvars['descend'] = morevars['descend'] = web.req.qsparams['descend'] |
|
1049 | lessvars['descend'] = morevars['descend'] = web.req.qsparams['descend'] | |
1051 |
|
1050 | |||
1052 | count = fctx.filerev() + 1 |
|
1051 | count = fctx.filerev() + 1 | |
1053 | start = max(0, count - revcount) # first rev on this page |
|
1052 | start = max(0, count - revcount) # first rev on this page | |
1054 | end = min(count, start + revcount) # last rev on this page |
|
1053 | end = min(count, start + revcount) # last rev on this page | |
1055 | parity = paritygen(web.stripecount, offset=start - end) |
|
1054 | parity = paritygen(web.stripecount, offset=start - end) | |
1056 |
|
1055 | |||
1057 | repo = web.repo |
|
1056 | repo = web.repo | |
1058 | revs = fctx.filelog().revs(start, end - 1) |
|
1057 | revs = fctx.filelog().revs(start, end - 1) | |
1059 | entries = [] |
|
1058 | entries = [] | |
1060 |
|
1059 | |||
1061 | diffstyle = web.config('web', 'style') |
|
1060 | diffstyle = web.config('web', 'style') | |
1062 | if 'style' in web.req.qsparams: |
|
1061 | if 'style' in web.req.qsparams: | |
1063 | diffstyle = web.req.qsparams['style'] |
|
1062 | diffstyle = web.req.qsparams['style'] | |
1064 |
|
1063 | |||
1065 | def diff(fctx, linerange=None): |
|
1064 | def diff(fctx, linerange=None): | |
1066 | ctx = fctx.changectx() |
|
1065 | ctx = fctx.changectx() | |
1067 | basectx = ctx.p1() |
|
1066 | basectx = ctx.p1() | |
1068 | path = fctx.path() |
|
1067 | path = fctx.path() | |
1069 | return webutil.diffs(web, ctx, basectx, [path], diffstyle, |
|
1068 | return webutil.diffs(web, ctx, basectx, [path], diffstyle, | |
1070 | linerange=linerange, |
|
1069 | linerange=linerange, | |
1071 | lineidprefix='%s-' % ctx.hex()[:12]) |
|
1070 | lineidprefix='%s-' % ctx.hex()[:12]) | |
1072 |
|
1071 | |||
1073 | linerange = None |
|
1072 | linerange = None | |
1074 | if lrange is not None: |
|
1073 | if lrange is not None: | |
1075 | linerange = webutil.formatlinerange(*lrange) |
|
1074 | linerange = webutil.formatlinerange(*lrange) | |
1076 | # deactivate numeric nav links when linerange is specified as this |
|
1075 | # deactivate numeric nav links when linerange is specified as this | |
1077 | # would require a dedicated "revnav" class |
|
1076 | # would require a dedicated "revnav" class | |
1078 | nav = None |
|
1077 | nav = None | |
1079 | if descend: |
|
1078 | if descend: | |
1080 | it = dagop.blockdescendants(fctx, *lrange) |
|
1079 | it = dagop.blockdescendants(fctx, *lrange) | |
1081 | else: |
|
1080 | else: | |
1082 | it = dagop.blockancestors(fctx, *lrange) |
|
1081 | it = dagop.blockancestors(fctx, *lrange) | |
1083 | for i, (c, lr) in enumerate(it, 1): |
|
1082 | for i, (c, lr) in enumerate(it, 1): | |
1084 | diffs = None |
|
1083 | diffs = None | |
1085 | if patch: |
|
1084 | if patch: | |
1086 | diffs = diff(c, linerange=lr) |
|
1085 | diffs = diff(c, linerange=lr) | |
1087 | # follow renames across filtered (not in range) revisions |
|
1086 | # follow renames across filtered (not in range) revisions | |
1088 | path = c.path() |
|
1087 | path = c.path() | |
1089 | entries.append(dict( |
|
1088 | entries.append(dict( | |
1090 | parity=next(parity), |
|
1089 | parity=next(parity), | |
1091 | filerev=c.rev(), |
|
1090 | filerev=c.rev(), | |
1092 | file=path, |
|
1091 | file=path, | |
1093 | diff=diffs, |
|
1092 | diff=diffs, | |
1094 | linerange=webutil.formatlinerange(*lr), |
|
1093 | linerange=webutil.formatlinerange(*lr), | |
1095 | **pycompat.strkwargs(webutil.commonentry(repo, c)))) |
|
1094 | **pycompat.strkwargs(webutil.commonentry(repo, c)))) | |
1096 | if i == revcount: |
|
1095 | if i == revcount: | |
1097 | break |
|
1096 | break | |
1098 | lessvars['linerange'] = webutil.formatlinerange(*lrange) |
|
1097 | lessvars['linerange'] = webutil.formatlinerange(*lrange) | |
1099 | morevars['linerange'] = lessvars['linerange'] |
|
1098 | morevars['linerange'] = lessvars['linerange'] | |
1100 | else: |
|
1099 | else: | |
1101 | for i in revs: |
|
1100 | for i in revs: | |
1102 | iterfctx = fctx.filectx(i) |
|
1101 | iterfctx = fctx.filectx(i) | |
1103 | diffs = None |
|
1102 | diffs = None | |
1104 | if patch: |
|
1103 | if patch: | |
1105 | diffs = diff(iterfctx) |
|
1104 | diffs = diff(iterfctx) | |
1106 | entries.append(dict( |
|
1105 | entries.append(dict( | |
1107 | parity=next(parity), |
|
1106 | parity=next(parity), | |
1108 | filerev=i, |
|
1107 | filerev=i, | |
1109 | file=f, |
|
1108 | file=f, | |
1110 | diff=diffs, |
|
1109 | diff=diffs, | |
1111 | rename=webutil.renamelink(iterfctx), |
|
1110 | rename=webutil.renamelink(iterfctx), | |
1112 | **pycompat.strkwargs(webutil.commonentry(repo, iterfctx)))) |
|
1111 | **pycompat.strkwargs(webutil.commonentry(repo, iterfctx)))) | |
1113 | entries.reverse() |
|
1112 | entries.reverse() | |
1114 | revnav = webutil.filerevnav(web.repo, fctx.path()) |
|
1113 | revnav = webutil.filerevnav(web.repo, fctx.path()) | |
1115 | nav = revnav.gen(end - 1, revcount, count) |
|
1114 | nav = revnav.gen(end - 1, revcount, count) | |
1116 |
|
1115 | |||
1117 | latestentry = entries[:1] |
|
1116 | latestentry = entries[:1] | |
1118 |
|
1117 | |||
1119 | return web.sendtemplate( |
|
1118 | return web.sendtemplate( | |
1120 | 'filelog', |
|
1119 | 'filelog', | |
1121 | file=f, |
|
1120 | file=f, | |
1122 | nav=nav, |
|
1121 | nav=nav, | |
1123 | symrev=webutil.symrevorshortnode(web.req, fctx), |
|
1122 | symrev=webutil.symrevorshortnode(web.req, fctx), | |
1124 | entries=entries, |
|
1123 | entries=entries, | |
1125 | descend=descend, |
|
1124 | descend=descend, | |
1126 | patch=patch, |
|
1125 | patch=patch, | |
1127 | latestentry=latestentry, |
|
1126 | latestentry=latestentry, | |
1128 | linerange=linerange, |
|
1127 | linerange=linerange, | |
1129 | revcount=revcount, |
|
1128 | revcount=revcount, | |
1130 | morevars=morevars, |
|
1129 | morevars=morevars, | |
1131 | lessvars=lessvars, |
|
1130 | lessvars=lessvars, | |
1132 | **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))) |
|
1131 | **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))) | |
1133 |
|
1132 | |||
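
An illustrative sketch of the linkrev walk near the top of filelog() above: starting from the newest file revision, step backwards until its linked changelog revision is no newer than the requested one. The numbers are invented.

    linkrevs = [0, 4, 9, 15]   # changelog revision linked to file revisions 0..3
    rev = 10                   # requested changelog revision
    frev = len(linkrevs) - 1
    while linkrevs[frev] > rev:
        frev -= 1
    # frev == 2: file revision 2 (linked to changelog revision 9) is rendered
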
1134 | @webcommand('archive') |
|
1133 | @webcommand('archive') | |
1135 | def archive(web): |
|
1134 | def archive(web): | |
1136 | """ |
|
1135 | """ | |
1137 | /archive/{revision}.{format}[/{path}] |
|
1136 | /archive/{revision}.{format}[/{path}] | |
1138 | ------------------------------------- |
|
1137 | ------------------------------------- | |
1139 |
|
1138 | |||
1140 | Obtain an archive of repository content. |
|
1139 | Obtain an archive of repository content. | |
1141 |
|
1140 | |||
1142 | The content and type of the archive is defined by a URL path parameter. |
|
1141 | The content and type of the archive is defined by a URL path parameter. | |
1143 | ``format`` is the file extension of the archive type to be generated. e.g. |
|
1142 | ``format`` is the file extension of the archive type to be generated. e.g. | |
1144 | ``zip`` or ``tar.bz2``. Not all archive types may be allowed by your |
|
1143 | ``zip`` or ``tar.bz2``. Not all archive types may be allowed by your | |
1145 | server configuration. |
|
1144 | server configuration. | |
1146 |
|
1145 | |||
1147 | The optional ``path`` URL parameter controls content to include in the |
|
1146 | The optional ``path`` URL parameter controls content to include in the | |
1148 | archive. If omitted, every file in the specified revision is present in the |
|
1147 | archive. If omitted, every file in the specified revision is present in the | |
1149 | archive. If included, only the specified file or contents of the specified |
|
1148 | archive. If included, only the specified file or contents of the specified | |
1150 | directory will be included in the archive. |
|
1149 | directory will be included in the archive. | |
1151 |
|
1150 | |||
1152 | No template is used for this handler. Raw, binary content is generated. |
|
1151 | No template is used for this handler. Raw, binary content is generated. | |
1153 | """ |
|
1152 | """ | |
1154 |
|
1153 | |||
1155 | type_ = web.req.qsparams.get('type') |
|
1154 | type_ = web.req.qsparams.get('type') | |
1156 | allowed = web.configlist("web", "allow_archive") |
|
1155 | allowed = web.configlist("web", "allow_archive") | |
1157 | key = web.req.qsparams['node'] |
|
1156 | key = web.req.qsparams['node'] | |
1158 |
|
1157 | |||
1159 | if type_ not in web.archivespecs: |
|
1158 | if type_ not in web.archivespecs: | |
1160 | msg = 'Unsupported archive type: %s' % type_ |
|
1159 | msg = 'Unsupported archive type: %s' % type_ | |
1161 | raise ErrorResponse(HTTP_NOT_FOUND, msg) |
|
1160 | raise ErrorResponse(HTTP_NOT_FOUND, msg) | |
1162 |
|
1161 | |||
1163 | if not ((type_ in allowed or |
|
1162 | if not ((type_ in allowed or | |
1164 | web.configbool("web", "allow" + type_))): |
|
1163 | web.configbool("web", "allow" + type_))): | |
1165 | msg = 'Archive type not allowed: %s' % type_ |
|
1164 | msg = 'Archive type not allowed: %s' % type_ | |
1166 | raise ErrorResponse(HTTP_FORBIDDEN, msg) |
|
1165 | raise ErrorResponse(HTTP_FORBIDDEN, msg) | |
1167 |
|
1166 | |||
1168 | reponame = re.sub(br"\W+", "-", os.path.basename(web.reponame)) |
|
1167 | reponame = re.sub(br"\W+", "-", os.path.basename(web.reponame)) | |
1169 | cnode = web.repo.lookup(key) |
|
1168 | cnode = web.repo.lookup(key) | |
1170 | arch_version = key |
|
1169 | arch_version = key | |
1171 | if cnode == key or key == 'tip': |
|
1170 | if cnode == key or key == 'tip': | |
1172 | arch_version = short(cnode) |
|
1171 | arch_version = short(cnode) | |
1173 | name = "%s-%s" % (reponame, arch_version) |
|
1172 | name = "%s-%s" % (reponame, arch_version) | |
1174 |
|
1173 | |||
1175 | ctx = webutil.changectx(web.repo, web.req) |
|
1174 | ctx = webutil.changectx(web.repo, web.req) | |
1176 | pats = [] |
|
1175 | pats = [] | |
1177 | match = scmutil.match(ctx, []) |
|
1176 | match = scmutil.match(ctx, []) | |
1178 | file = web.req.qsparams.get('file') |
|
1177 | file = web.req.qsparams.get('file') | |
1179 | if file: |
|
1178 | if file: | |
1180 | pats = ['path:' + file] |
|
1179 | pats = ['path:' + file] | |
1181 | match = scmutil.match(ctx, pats, default='path') |
|
1180 | match = scmutil.match(ctx, pats, default='path') | |
1182 | if pats: |
|
1181 | if pats: | |
1183 | files = [f for f in ctx.manifest().keys() if match(f)] |
|
1182 | files = [f for f in ctx.manifest().keys() if match(f)] | |
1184 | if not files: |
|
1183 | if not files: | |
1185 | raise ErrorResponse(HTTP_NOT_FOUND, |
|
1184 | raise ErrorResponse(HTTP_NOT_FOUND, | |
1186 | 'file(s) not found: %s' % file) |
|
1185 | 'file(s) not found: %s' % file) | |
1187 |
|
1186 | |||
1188 | mimetype, artype, extension, encoding = web.archivespecs[type_] |
|
1187 | mimetype, artype, extension, encoding = web.archivespecs[type_] | |
1189 |
|
1188 | |||
1190 | web.res.headers['Content-Type'] = mimetype |
|
1189 | web.res.headers['Content-Type'] = mimetype | |
1191 | web.res.headers['Content-Disposition'] = 'attachment; filename=%s%s' % ( |
|
1190 | web.res.headers['Content-Disposition'] = 'attachment; filename=%s%s' % ( | |
1192 | name, extension) |
|
1191 | name, extension) | |
1193 |
|
1192 | |||
1194 | if encoding: |
|
1193 | if encoding: | |
1195 | web.res.headers['Content-Encoding'] = encoding |
|
1194 | web.res.headers['Content-Encoding'] = encoding | |
1196 |
|
1195 | |||
1197 | web.res.setbodywillwrite() |
|
1196 | web.res.setbodywillwrite() | |
1198 | if list(web.res.sendresponse()): |
|
1197 | if list(web.res.sendresponse()): | |
1199 | raise error.ProgrammingError('sendresponse() should not emit data ' |
|
1198 | raise error.ProgrammingError('sendresponse() should not emit data ' | |
1200 | 'if writing later') |
|
1199 | 'if writing later') | |
1201 |
|
1200 | |||
1202 | bodyfh = web.res.getbodyfile() |
|
1201 | bodyfh = web.res.getbodyfile() | |
1203 |
|
1202 | |||
1204 | archival.archive(web.repo, bodyfh, cnode, artype, prefix=name, |
|
1203 | archival.archive(web.repo, bodyfh, cnode, artype, prefix=name, | |
1205 | matchfn=match, |
|
1204 | matchfn=match, | |
1206 | subrepos=web.configbool("web", "archivesubrepos")) |
|
1205 | subrepos=web.configbool("web", "archivesubrepos")) | |
1207 |
|
1206 | |||
1208 | return [] |
|
1207 | return [] | |
1209 |
|
1208 | |||
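
A sketch of the archive naming in the handler above: non-word characters in the repository name collapse to "-" and the requested key (or short node) is appended. The repository name and node below are examples; the real code operates on bytes.

    import re

    reponame = re.sub(r"\W+", "-", "my repo")     # 'my-repo'
    name = "%s-%s" % (reponame, "1a2b3c4d5e6f")   # 'my-repo-1a2b3c4d5e6f'
    # for a zip request the response header becomes:
    #   Content-Disposition: attachment; filename=my-repo-1a2b3c4d5e6f.zip
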
1210 | @webcommand('static') |
|
1209 | @webcommand('static') | |
1211 | def static(web): |
|
1210 | def static(web): | |
1212 | fname = web.req.qsparams['file'] |
|
1211 | fname = web.req.qsparams['file'] | |
1213 | # a repo owner may set web.static in .hg/hgrc to get any file |
|
1212 | # a repo owner may set web.static in .hg/hgrc to get any file | |
1214 | # readable by the user running the CGI script |
|
1213 | # readable by the user running the CGI script | |
1215 | static = web.config("web", "static", None, untrusted=False) |
|
1214 | static = web.config("web", "static", None, untrusted=False) | |
1216 | if not static: |
|
1215 | if not static: | |
1217 | tp = web.templatepath or templater.templatepaths() |
|
1216 | tp = web.templatepath or templater.templatepaths() | |
1218 | if isinstance(tp, str): |
|
1217 | if isinstance(tp, str): | |
1219 | tp = [tp] |
|
1218 | tp = [tp] | |
1220 | static = [os.path.join(p, 'static') for p in tp] |
|
1219 | static = [os.path.join(p, 'static') for p in tp] | |
1221 |
|
1220 | |||
1222 | staticfile(static, fname, web.res) |
|
1221 | staticfile(static, fname, web.res) | |
1223 | return web.res.sendresponse() |
|
1222 | return web.res.sendresponse() | |
1224 |
|
1223 | |||
1225 | @webcommand('graph') |
|
1224 | @webcommand('graph') | |
1226 | def graph(web): |
|
1225 | def graph(web): | |
1227 | """ |
|
1226 | """ | |
1228 | /graph[/{revision}] |
|
1227 | /graph[/{revision}] | |
1229 | ------------------- |
|
1228 | ------------------- | |
1230 |
|
1229 | |||
1231 | Show information about the graphical topology of the repository. |
|
1230 | Show information about the graphical topology of the repository. | |
1232 |
|
1231 | |||
1233 | Information rendered by this handler can be used to create visual |
|
1232 | Information rendered by this handler can be used to create visual | |
1234 | representations of repository topology. |
|
1233 | representations of repository topology. | |
1235 |
|
1234 | |||
1236 | The ``revision`` URL parameter controls the starting changeset. If it's |
|
1235 | The ``revision`` URL parameter controls the starting changeset. If it's | |
1237 | absent, the default is ``tip``. |
|
1236 | absent, the default is ``tip``. | |
1238 |
|
1237 | |||
1239 | The ``revcount`` query string argument can define the number of changesets |
|
1238 | The ``revcount`` query string argument can define the number of changesets | |
1240 | to show information for. |
|
1239 | to show information for. | |
1241 |
|
1240 | |||
1242 | The ``graphtop`` query string argument can specify the starting changeset |
|
1241 | The ``graphtop`` query string argument can specify the starting changeset | |
1243 | for producing the ``jsdata`` variable that is used for rendering the graph in |
|
1242 | for producing the ``jsdata`` variable that is used for rendering the graph in | |
1244 | JavaScript. By default it has the same value as ``revision``. |
|
1243 | JavaScript. By default it has the same value as ``revision``. | |
1245 |
|
1244 | |||
1246 | This handler will render the ``graph`` template. |
|
1245 | This handler will render the ``graph`` template. | |
1247 | """ |
|
1246 | """ | |
1248 |
|
1247 | |||
1249 | if 'node' in web.req.qsparams: |
|
1248 | if 'node' in web.req.qsparams: | |
1250 | ctx = webutil.changectx(web.repo, web.req) |
|
1249 | ctx = webutil.changectx(web.repo, web.req) | |
1251 | symrev = webutil.symrevorshortnode(web.req, ctx) |
|
1250 | symrev = webutil.symrevorshortnode(web.req, ctx) | |
1252 | else: |
|
1251 | else: | |
1253 | ctx = web.repo['tip'] |
|
1252 | ctx = web.repo['tip'] | |
1254 | symrev = 'tip' |
|
1253 | symrev = 'tip' | |
1255 | rev = ctx.rev() |
|
1254 | rev = ctx.rev() | |
1256 |
|
1255 | |||
1257 | bg_height = 39 |
|
1256 | bg_height = 39 | |
1258 | revcount = web.maxshortchanges |
|
1257 | revcount = web.maxshortchanges | |
1259 | if 'revcount' in web.req.qsparams: |
|
1258 | if 'revcount' in web.req.qsparams: | |
1260 | try: |
|
1259 | try: | |
1261 | revcount = int(web.req.qsparams.get('revcount', revcount)) |
|
1260 | revcount = int(web.req.qsparams.get('revcount', revcount)) | |
1262 | revcount = max(revcount, 1) |
|
1261 | revcount = max(revcount, 1) | |
1263 | web.tmpl.defaults['sessionvars']['revcount'] = revcount |
|
1262 | web.tmpl.defaults['sessionvars']['revcount'] = revcount | |
1264 | except ValueError: |
|
1263 | except ValueError: | |
1265 | pass |
|
1264 | pass | |
1266 |
|
1265 | |||
1267 | lessvars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
1266 | lessvars = copy.copy(web.tmpl.defaults['sessionvars']) | |
1268 | lessvars['revcount'] = max(revcount // 2, 1) |
|
1267 | lessvars['revcount'] = max(revcount // 2, 1) | |
1269 | morevars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
1268 | morevars = copy.copy(web.tmpl.defaults['sessionvars']) | |
1270 | morevars['revcount'] = revcount * 2 |
|
1269 | morevars['revcount'] = revcount * 2 | |
1271 |
|
1270 | |||
1272 | graphtop = web.req.qsparams.get('graphtop', ctx.hex()) |
|
1271 | graphtop = web.req.qsparams.get('graphtop', ctx.hex()) | |
1273 | graphvars = copy.copy(web.tmpl.defaults['sessionvars']) |
|
1272 | graphvars = copy.copy(web.tmpl.defaults['sessionvars']) | |
1274 | graphvars['graphtop'] = graphtop |
|
1273 | graphvars['graphtop'] = graphtop | |
1275 |
|
1274 | |||
1276 | count = len(web.repo) |
|
1275 | count = len(web.repo) | |
1277 | pos = rev |
|
1276 | pos = rev | |
1278 |
|
1277 | |||
1279 | uprev = min(max(0, count - 1), rev + revcount) |
|
1278 | uprev = min(max(0, count - 1), rev + revcount) | |
1280 | downrev = max(0, rev - revcount) |
|
1279 | downrev = max(0, rev - revcount) | |
1281 | changenav = webutil.revnav(web.repo).gen(pos, revcount, count) |
|
1280 | changenav = webutil.revnav(web.repo).gen(pos, revcount, count) | |
1282 |
|
1281 | |||
1283 | tree = [] |
|
1282 | tree = [] | |
1284 | nextentry = [] |
|
1283 | nextentry = [] | |
1285 | lastrev = 0 |
|
1284 | lastrev = 0 | |
1286 | if pos != -1: |
|
1285 | if pos != -1: | |
1287 | allrevs = web.repo.changelog.revs(pos, 0) |
|
1286 | allrevs = web.repo.changelog.revs(pos, 0) | |
1288 | revs = [] |
|
1287 | revs = [] | |
1289 | for i in allrevs: |
|
1288 | for i in allrevs: | |
1290 | revs.append(i) |
|
1289 | revs.append(i) | |
1291 | if len(revs) >= revcount + 1: |
|
1290 | if len(revs) >= revcount + 1: | |
1292 | break |
|
1291 | break | |
1293 |
|
1292 | |||
1294 | if len(revs) > revcount: |
|
1293 | if len(revs) > revcount: | |
1295 | nextentry = [webutil.commonentry(web.repo, web.repo[revs[-1]])] |
|
1294 | nextentry = [webutil.commonentry(web.repo, web.repo[revs[-1]])] | |
1296 | revs = revs[:-1] |
|
1295 | revs = revs[:-1] | |
1297 |
|
1296 | |||
1298 | lastrev = revs[-1] |
|
1297 | lastrev = revs[-1] | |
1299 |
|
1298 | |||
1300 | # We have to feed a baseset to dagwalker as it is expecting a smartset |
|
1299 | # We have to feed a baseset to dagwalker as it is expecting a smartset | |
1301 | # object. This does not have a big impact on hgweb performance itself |
|
1300 | # object. This does not have a big impact on hgweb performance itself | |
1302 | # since hgweb graphing code is not itself lazy yet. |
|
1301 | # since hgweb graphing code is not itself lazy yet. | |
1303 | dag = graphmod.dagwalker(web.repo, smartset.baseset(revs)) |
|
1302 | dag = graphmod.dagwalker(web.repo, smartset.baseset(revs)) | |
1304 | # As we said one line above... not lazy. |
|
1303 | # As we said one line above... not lazy. | |
1305 | tree = list(item for item in graphmod.colored(dag, web.repo) |
|
1304 | tree = list(item for item in graphmod.colored(dag, web.repo) | |
1306 | if item[1] == graphmod.CHANGESET) |
|
1305 | if item[1] == graphmod.CHANGESET) | |
1307 |
|
1306 | |||
1308 | def nodecurrent(ctx): |
|
1307 | def nodecurrent(ctx): | |
1309 | wpnodes = web.repo.dirstate.parents() |
|
1308 | wpnodes = web.repo.dirstate.parents() | |
1310 | if wpnodes[1] == nullid: |
|
1309 | if wpnodes[1] == nullid: | |
1311 | wpnodes = wpnodes[:1] |
|
1310 | wpnodes = wpnodes[:1] | |
1312 | if ctx.node() in wpnodes: |
|
1311 | if ctx.node() in wpnodes: | |
1313 | return '@' |
|
1312 | return '@' | |
1314 | return '' |
|
1313 | return '' | |
1315 |
|
1314 | |||
1316 | def nodesymbol(ctx): |
|
1315 | def nodesymbol(ctx): | |
1317 | if ctx.obsolete(): |
|
1316 | if ctx.obsolete(): | |
1318 | return 'x' |
|
1317 | return 'x' | |
1319 | elif ctx.isunstable(): |
|
1318 | elif ctx.isunstable(): | |
1320 | return '*' |
|
1319 | return '*' | |
1321 | elif ctx.closesbranch(): |
|
1320 | elif ctx.closesbranch(): | |
1322 | return '_' |
|
1321 | return '_' | |
1323 | else: |
|
1322 | else: | |
1324 | return 'o' |
|
1323 | return 'o' | |
1325 |
|
1324 | |||
1326 | def fulltree(): |
|
1325 | def fulltree(): | |
1327 | pos = web.repo[graphtop].rev() |
|
1326 | pos = web.repo[graphtop].rev() | |
1328 | tree = [] |
|
1327 | tree = [] | |
1329 | if pos != -1: |
|
1328 | if pos != -1: | |
1330 | revs = web.repo.changelog.revs(pos, lastrev) |
|
1329 | revs = web.repo.changelog.revs(pos, lastrev) | |
1331 | dag = graphmod.dagwalker(web.repo, smartset.baseset(revs)) |
|
1330 | dag = graphmod.dagwalker(web.repo, smartset.baseset(revs)) | |
1332 | tree = list(item for item in graphmod.colored(dag, web.repo) |
|
1331 | tree = list(item for item in graphmod.colored(dag, web.repo) | |
1333 | if item[1] == graphmod.CHANGESET) |
|
1332 | if item[1] == graphmod.CHANGESET) | |
1334 | return tree |
|
1333 | return tree | |
1335 |
|
1334 | |||
1336 | def jsdata(): |
|
1335 | def jsdata(): | |
1337 | return [{'node': pycompat.bytestr(ctx), |
|
1336 | return [{'node': pycompat.bytestr(ctx), | |
1338 | 'graphnode': nodecurrent(ctx) + nodesymbol(ctx), |
|
1337 | 'graphnode': nodecurrent(ctx) + nodesymbol(ctx), | |
1339 | 'vertex': vtx, |
|
1338 | 'vertex': vtx, | |
1340 | 'edges': edges} |
|
1339 | 'edges': edges} | |
1341 | for (id, type, ctx, vtx, edges) in fulltree()] |
|
1340 | for (id, type, ctx, vtx, edges) in fulltree()] | |
1342 |
|
1341 | |||
1343 | def nodes(): |
|
1342 | def nodes(): | |
1344 | parity = paritygen(web.stripecount) |
|
1343 | parity = paritygen(web.stripecount) | |
1345 | for row, (id, type, ctx, vtx, edges) in enumerate(tree): |
|
1344 | for row, (id, type, ctx, vtx, edges) in enumerate(tree): | |
1346 | entry = webutil.commonentry(web.repo, ctx) |
|
1345 | entry = webutil.commonentry(web.repo, ctx) | |
1347 | edgedata = [{'col': edge[0], |
|
1346 | edgedata = [{'col': edge[0], | |
1348 | 'nextcol': edge[1], |
|
1347 | 'nextcol': edge[1], | |
1349 | 'color': (edge[2] - 1) % 6 + 1, |
|
1348 | 'color': (edge[2] - 1) % 6 + 1, | |
1350 | 'width': edge[3], |
|
1349 | 'width': edge[3], | |
1351 | 'bcolor': edge[4]} |
|
1350 | 'bcolor': edge[4]} | |
1352 | for edge in edges] |
|
1351 | for edge in edges] | |
1353 |
|
1352 | |||
1354 | entry.update({'col': vtx[0], |
|
1353 | entry.update({'col': vtx[0], | |
1355 | 'color': (vtx[1] - 1) % 6 + 1, |
|
1354 | 'color': (vtx[1] - 1) % 6 + 1, | |
1356 | 'parity': next(parity), |
|
1355 | 'parity': next(parity), | |
1357 | 'edges': edgedata, |
|
1356 | 'edges': edgedata, | |
1358 | 'row': row, |
|
1357 | 'row': row, | |
1359 | 'nextrow': row + 1}) |
|
1358 | 'nextrow': row + 1}) | |
1360 |
|
1359 | |||
1361 | yield entry |
|
1360 | yield entry | |
1362 |
|
1361 | |||
1363 | rows = len(tree) |
|
1362 | rows = len(tree) | |
1364 |
|
1363 | |||
1365 | return web.sendtemplate( |
|
1364 | return web.sendtemplate( | |
1366 | 'graph', |
|
1365 | 'graph', | |
1367 | rev=rev, |
|
1366 | rev=rev, | |
1368 | symrev=symrev, |
|
1367 | symrev=symrev, | |
1369 | revcount=revcount, |
|
1368 | revcount=revcount, | |
1370 | uprev=uprev, |
|
1369 | uprev=uprev, | |
1371 | lessvars=lessvars, |
|
1370 | lessvars=lessvars, | |
1372 | morevars=morevars, |
|
1371 | morevars=morevars, | |
1373 | downrev=downrev, |
|
1372 | downrev=downrev, | |
1374 | graphvars=graphvars, |
|
1373 | graphvars=graphvars, | |
1375 | rows=rows, |
|
1374 | rows=rows, | |
1376 | bg_height=bg_height, |
|
1375 | bg_height=bg_height, | |
1377 | changesets=count, |
|
1376 | changesets=count, | |
1378 | nextentry=nextentry, |
|
1377 | nextentry=nextentry, | |
1379 | jsdata=lambda **x: jsdata(), |
|
1378 | jsdata=lambda **x: jsdata(), | |
1380 | nodes=lambda **x: nodes(), |
|
1379 | nodes=lambda **x: nodes(), | |
1381 | node=ctx.hex(), |
|
1380 | node=ctx.hex(), | |
1382 | changenav=changenav) |
|
1381 | changenav=changenav) | |
1383 |
|
1382 | |||
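
An illustrative sketch of the colour folding applied to graph vertices and edges above: raw colour indices are mapped into the 1..6 range expected by the graph templates.

    raw = [1, 2, 6, 7, 13]
    folded = [(c - 1) % 6 + 1 for c in raw]
    # folded == [1, 2, 6, 1, 1]
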
1384 | def _getdoc(e): |
|
1383 | def _getdoc(e): | |
1385 | doc = e[0].__doc__ |
|
1384 | doc = e[0].__doc__ | |
1386 | if doc: |
|
1385 | if doc: | |
1387 | doc = _(doc).partition('\n')[0] |
|
1386 | doc = _(doc).partition('\n')[0] | |
1388 | else: |
|
1387 | else: | |
1389 | doc = _('(no help text available)') |
|
1388 | doc = _('(no help text available)') | |
1390 | return doc |
|
1389 | return doc | |
1391 |
|
1390 | |||
1392 | @webcommand('help') |
|
1391 | @webcommand('help') | |
1393 | def help(web): |
|
1392 | def help(web): | |
1394 | """ |
|
1393 | """ | |
1395 | /help[/{topic}] |
|
1394 | /help[/{topic}] | |
1396 | --------------- |
|
1395 | --------------- | |
1397 |
|
1396 | |||
1398 | Render help documentation. |
|
1397 | Render help documentation. | |
1399 |
|
1398 | |||
1400 | This web command is roughly equivalent to :hg:`help`. If a ``topic`` |
|
1399 | This web command is roughly equivalent to :hg:`help`. If a ``topic`` | |
1401 | is defined, that help topic will be rendered. If not, an index of |
|
1400 | is defined, that help topic will be rendered. If not, an index of | |
1402 | available help topics will be rendered. |
|
1401 | available help topics will be rendered. | |
1403 |
|
1402 | |||
1404 | The ``help`` template will be rendered when requesting help for a topic. |
|
1403 | The ``help`` template will be rendered when requesting help for a topic. | |
1405 | ``helptopics`` will be rendered for the index of help topics. |
|
1404 | ``helptopics`` will be rendered for the index of help topics. | |
1406 | """ |
|
1405 | """ | |
1407 | from .. import commands, help as helpmod # avoid cycle |
|
1406 | from .. import commands, help as helpmod # avoid cycle | |
1408 |
|
1407 | |||
1409 | topicname = web.req.qsparams.get('node') |
|
1408 | topicname = web.req.qsparams.get('node') | |
1410 | if not topicname: |
|
1409 | if not topicname: | |
1411 | def topics(**map): |
|
1410 | def topics(**map): | |
1412 | for entries, summary, _doc in helpmod.helptable: |
|
1411 | for entries, summary, _doc in helpmod.helptable: | |
1413 | yield {'topic': entries[0], 'summary': summary} |
|
1412 | yield {'topic': entries[0], 'summary': summary} | |
1414 |
|
1413 | |||
1415 | early, other = [], [] |
|
1414 | early, other = [], [] | |
1416 | primary = lambda s: s.partition('|')[0] |
|
1415 | primary = lambda s: s.partition('|')[0] | |
1417 | for c, e in commands.table.iteritems(): |
|
1416 | for c, e in commands.table.iteritems(): | |
1418 | doc = _getdoc(e) |
|
1417 | doc = _getdoc(e) | |
1419 | if 'DEPRECATED' in doc or c.startswith('debug'): |
|
1418 | if 'DEPRECATED' in doc or c.startswith('debug'): | |
1420 | continue |
|
1419 | continue | |
1421 | cmd = primary(c) |
|
1420 | cmd = primary(c) | |
1422 | if cmd.startswith('^'): |
|
1421 | if cmd.startswith('^'): | |
1423 | early.append((cmd[1:], doc)) |
|
1422 | early.append((cmd[1:], doc)) | |
1424 | else: |
|
1423 | else: | |
1425 | other.append((cmd, doc)) |
|
1424 | other.append((cmd, doc)) | |
1426 |
|
1425 | |||
1427 | early.sort() |
|
1426 | early.sort() | |
1428 | other.sort() |
|
1427 | other.sort() | |
1429 |
|
1428 | |||
1430 | def earlycommands(**map): |
|
1429 | def earlycommands(**map): | |
1431 | for c, doc in early: |
|
1430 | for c, doc in early: | |
1432 | yield {'topic': c, 'summary': doc} |
|
1431 | yield {'topic': c, 'summary': doc} | |
1433 |
|
1432 | |||
1434 | def othercommands(**map): |
|
1433 | def othercommands(**map): | |
1435 | for c, doc in other: |
|
1434 | for c, doc in other: | |
1436 | yield {'topic': c, 'summary': doc} |
|
1435 | yield {'topic': c, 'summary': doc} | |
1437 |
|
1436 | |||
1438 | return web.sendtemplate( |
|
1437 | return web.sendtemplate( | |
1439 | 'helptopics', |
|
1438 | 'helptopics', | |
1440 | topics=topics, |
|
1439 | topics=topics, | |
1441 | earlycommands=earlycommands, |
|
1440 | earlycommands=earlycommands, | |
1442 | othercommands=othercommands, |
|
1441 | othercommands=othercommands, | |
1443 | title='Index') |
|
1442 | title='Index') | |
1444 |
|
1443 | |||
1445 | # Render an index of sub-topics. |
|
1444 | # Render an index of sub-topics. | |
1446 | if topicname in helpmod.subtopics: |
|
1445 | if topicname in helpmod.subtopics: | |
1447 | topics = [] |
|
1446 | topics = [] | |
1448 | for entries, summary, _doc in helpmod.subtopics[topicname]: |
|
1447 | for entries, summary, _doc in helpmod.subtopics[topicname]: | |
1449 | topics.append({ |
|
1448 | topics.append({ | |
1450 | 'topic': '%s.%s' % (topicname, entries[0]), |
|
1449 | 'topic': '%s.%s' % (topicname, entries[0]), | |
1451 | 'basename': entries[0], |
|
1450 | 'basename': entries[0], | |
1452 | 'summary': summary, |
|
1451 | 'summary': summary, | |
1453 | }) |
|
1452 | }) | |
1454 |
|
1453 | |||
1455 | return web.sendtemplate( |
|
1454 | return web.sendtemplate( | |
1456 | 'helptopics', |
|
1455 | 'helptopics', | |
1457 | topics=topics, |
|
1456 | topics=topics, | |
1458 | title=topicname, |
|
1457 | title=topicname, | |
1459 | subindex=True) |
|
1458 | subindex=True) | |
1460 |
|
1459 | |||
1461 | u = webutil.wsgiui.load() |
|
1460 | u = webutil.wsgiui.load() | |
1462 | u.verbose = True |
|
1461 | u.verbose = True | |
1463 |
|
1462 | |||
1464 | # Render a page from a sub-topic. |
|
1463 | # Render a page from a sub-topic. | |
1465 | if '.' in topicname: |
|
1464 | if '.' in topicname: | |
1466 | # TODO implement support for rendering sections, like |
|
1465 | # TODO implement support for rendering sections, like | |
1467 | # `hg help` works. |
|
1466 | # `hg help` works. | |
1468 | topic, subtopic = topicname.split('.', 1) |
|
1467 | topic, subtopic = topicname.split('.', 1) | |
1469 | if topic not in helpmod.subtopics: |
|
1468 | if topic not in helpmod.subtopics: | |
1470 | raise ErrorResponse(HTTP_NOT_FOUND) |
|
1469 | raise ErrorResponse(HTTP_NOT_FOUND) | |
1471 | else: |
|
1470 | else: | |
1472 | topic = topicname |
|
1471 | topic = topicname | |
1473 | subtopic = None |
|
1472 | subtopic = None | |
1474 |
|
1473 | |||
1475 | try: |
|
1474 | try: | |
1476 | doc = helpmod.help_(u, commands, topic, subtopic=subtopic) |
|
1475 | doc = helpmod.help_(u, commands, topic, subtopic=subtopic) | |
1477 | except error.Abort: |
|
1476 | except error.Abort: | |
1478 | raise ErrorResponse(HTTP_NOT_FOUND) |
|
1477 | raise ErrorResponse(HTTP_NOT_FOUND) | |
1479 |
|
1478 | |||
1480 | return web.sendtemplate( |
|
1479 | return web.sendtemplate( | |
1481 | 'help', |
|
1480 | 'help', | |
1482 | topic=topicname, |
|
1481 | topic=topicname, | |
1483 | doc=doc) |
|
1482 | doc=doc) | |
1484 |
|
1483 | |||
1485 | # tell hggettext to extract docstrings from these functions: |
|
1484 | # tell hggettext to extract docstrings from these functions: | |
1486 | i18nfunctions = commands.values() |
|
1485 | i18nfunctions = commands.values() |
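
The help() web command above picks one of three render paths from the `node` query parameter: no topic gives the full index of topics and commands, a known sub-topic container gives a sub-topic index, and anything else (including dotted `topic.subtopic` names) is rendered as a single help page or rejected with a 404. A minimal standalone sketch of that dispatch order, using an illustrative subtopics table rather than Mercurial's real help data:

    subtopics = {'internals': [('revlogs', 'Revision Logs')]}   # illustrative only

    def dispatch(topicname):
        if not topicname:
            return ('helptopics', 'Index')       # index of topics and commands
        if topicname in subtopics:
            return ('helptopics', topicname)     # index of a topic's sub-topics
        if '.' in topicname:
            topic, subtopic = topicname.split('.', 1)
            if topic not in subtopics:
                return ('error', 'HTTP 404')
        return ('help', topicname)               # single rendered help page

    print(dispatch(''))                   # ('helptopics', 'Index')
    print(dispatch('internals'))          # ('helptopics', 'internals')
    print(dispatch('internals.revlogs'))  # ('help', 'internals.revlogs')
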
@@ -1,680 +1,700 b'' | |||||
1 | # hgweb/webutil.py - utility library for the web interface. |
|
1 | # hgweb/webutil.py - utility library for the web interface. | |
2 | # |
|
2 | # | |
3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> | |
4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
5 | # |
|
5 | # | |
6 | # This software may be used and distributed according to the terms of the |
|
6 | # This software may be used and distributed according to the terms of the | |
7 | # GNU General Public License version 2 or any later version. |
|
7 | # GNU General Public License version 2 or any later version. | |
8 |
|
8 | |||
9 | from __future__ import absolute_import |
|
9 | from __future__ import absolute_import | |
10 |
|
10 | |||
11 | import copy |
|
11 | import copy | |
12 | import difflib |
|
12 | import difflib | |
13 | import os |
|
13 | import os | |
14 | import re |
|
14 | import re | |
15 |
|
15 | |||
16 | from ..i18n import _ |
|
16 | from ..i18n import _ | |
17 | from ..node import hex, nullid, short |
|
17 | from ..node import hex, nullid, short | |
18 |
|
18 | |||
19 | from .common import ( |
|
19 | from .common import ( | |
20 | ErrorResponse, |
|
20 | ErrorResponse, | |
21 | HTTP_BAD_REQUEST, |
|
21 | HTTP_BAD_REQUEST, | |
22 | HTTP_NOT_FOUND, |
|
22 | HTTP_NOT_FOUND, | |
23 | paritygen, |
|
23 | paritygen, | |
24 | ) |
|
24 | ) | |
25 |
|
25 | |||
26 | from .. import ( |
|
26 | from .. import ( | |
27 | context, |
|
27 | context, | |
28 | error, |
|
28 | error, | |
29 | match, |
|
29 | match, | |
30 | mdiff, |
|
30 | mdiff, | |
31 | obsutil, |
|
31 | obsutil, | |
32 | patch, |
|
32 | patch, | |
33 | pathutil, |
|
33 | pathutil, | |
34 | pycompat, |
|
34 | pycompat, | |
35 | templatefilters, |
|
35 | templatefilters, | |
36 | templatekw, |
|
36 | templatekw, | |
37 | ui as uimod, |
|
37 | ui as uimod, | |
38 | util, |
|
38 | util, | |
39 | ) |
|
39 | ) | |
40 |
|
40 | |||
41 | def up(p): |
|
41 | def up(p): | |
42 | if p[0:1] != "/": |
|
42 | if p[0:1] != "/": | |
43 | p = "/" + p |
|
43 | p = "/" + p | |
44 | if p[-1:] == "/": |
|
44 | if p[-1:] == "/": | |
45 | p = p[:-1] |
|
45 | p = p[:-1] | |
46 | up = os.path.dirname(p) |
|
46 | up = os.path.dirname(p) | |
47 | if up == "/": |
|
47 | if up == "/": | |
48 | return "/" |
|
48 | return "/" | |
49 | return up + "/" |
|
49 | return up + "/" | |
50 |
|
50 | |||
51 | def _navseq(step, firststep=None): |
|
51 | def _navseq(step, firststep=None): | |
52 | if firststep: |
|
52 | if firststep: | |
53 | yield firststep |
|
53 | yield firststep | |
54 | if firststep >= 20 and firststep <= 40: |
|
54 | if firststep >= 20 and firststep <= 40: | |
55 | firststep = 50 |
|
55 | firststep = 50 | |
56 | yield firststep |
|
56 | yield firststep | |
57 | assert step > 0 |
|
57 | assert step > 0 | |
58 | assert firststep > 0 |
|
58 | assert firststep > 0 | |
59 | while step <= firststep: |
|
59 | while step <= firststep: | |
60 | step *= 10 |
|
60 | step *= 10 | |
61 | while True: |
|
61 | while True: | |
62 | yield 1 * step |
|
62 | yield 1 * step | |
63 | yield 3 * step |
|
63 | yield 3 * step | |
64 | step *= 10 |
|
64 | step *= 10 | |
65 |
|
65 | |||
66 | class revnav(object): |
|
66 | class revnav(object): | |
67 |
|
67 | |||
68 | def __init__(self, repo): |
|
68 | def __init__(self, repo): | |
69 | """Navigation generation object |
|
69 | """Navigation generation object | |
70 |
|
70 | |||
71 | :repo: repo object we generate nav for |
|
71 | :repo: repo object we generate nav for | |
72 | """ |
|
72 | """ | |
73 | # used for hex generation |
|
73 | # used for hex generation | |
74 | self._revlog = repo.changelog |
|
74 | self._revlog = repo.changelog | |
75 |
|
75 | |||
76 | def __nonzero__(self): |
|
76 | def __nonzero__(self): | |
77 | """return True if any revision to navigate over""" |
|
77 | """return True if any revision to navigate over""" | |
78 | return self._first() is not None |
|
78 | return self._first() is not None | |
79 |
|
79 | |||
80 | __bool__ = __nonzero__ |
|
80 | __bool__ = __nonzero__ | |
81 |
|
81 | |||
82 | def _first(self): |
|
82 | def _first(self): | |
83 | """return the minimum non-filtered changeset or None""" |
|
83 | """return the minimum non-filtered changeset or None""" | |
84 | try: |
|
84 | try: | |
85 | return next(iter(self._revlog)) |
|
85 | return next(iter(self._revlog)) | |
86 | except StopIteration: |
|
86 | except StopIteration: | |
87 | return None |
|
87 | return None | |
88 |
|
88 | |||
89 | def hex(self, rev): |
|
89 | def hex(self, rev): | |
90 | return hex(self._revlog.node(rev)) |
|
90 | return hex(self._revlog.node(rev)) | |
91 |
|
91 | |||
92 | def gen(self, pos, pagelen, limit): |
|
92 | def gen(self, pos, pagelen, limit): | |
93 | """computes label and revision id for navigation link |
|
93 | """computes label and revision id for navigation link | |
94 |
|
94 | |||
95 | :pos: is the revision relative to which we generate navigation. |
|
95 | :pos: is the revision relative to which we generate navigation. | |
96 | :pagelen: the size of each navigation page |
|
96 | :pagelen: the size of each navigation page | |
97 | :limit: how far shall we link |
|
97 | :limit: how far shall we link | |
98 |
|
98 | |||
99 | The return is: |
|
99 | The return is: | |
100 | - a single element tuple |
|
100 | - a single element tuple | |
101 | - containing a dictionary with a `before` and `after` key |
|
101 | - containing a dictionary with a `before` and `after` key | |
102 | - values are generator functions taking arbitrary number of kwargs |
|
102 | - values are generator functions taking arbitrary number of kwargs | |
103 | - yield items are dictionaries with `label` and `node` keys |
|
103 | - yield items are dictionaries with `label` and `node` keys | |
104 | """ |
|
104 | """ | |
105 | if not self: |
|
105 | if not self: | |
106 | # empty repo |
|
106 | # empty repo | |
107 | return ({'before': (), 'after': ()},) |
|
107 | return ({'before': (), 'after': ()},) | |
108 |
|
108 | |||
109 | targets = [] |
|
109 | targets = [] | |
110 | for f in _navseq(1, pagelen): |
|
110 | for f in _navseq(1, pagelen): | |
111 | if f > limit: |
|
111 | if f > limit: | |
112 | break |
|
112 | break | |
113 | targets.append(pos + f) |
|
113 | targets.append(pos + f) | |
114 | targets.append(pos - f) |
|
114 | targets.append(pos - f) | |
115 | targets.sort() |
|
115 | targets.sort() | |
116 |
|
116 | |||
117 | first = self._first() |
|
117 | first = self._first() | |
118 | navbefore = [("(%i)" % first, self.hex(first))] |
|
118 | navbefore = [("(%i)" % first, self.hex(first))] | |
119 | navafter = [] |
|
119 | navafter = [] | |
120 | for rev in targets: |
|
120 | for rev in targets: | |
121 | if rev not in self._revlog: |
|
121 | if rev not in self._revlog: | |
122 | continue |
|
122 | continue | |
123 | if pos < rev < limit: |
|
123 | if pos < rev < limit: | |
124 | navafter.append(("+%d" % abs(rev - pos), self.hex(rev))) |
|
124 | navafter.append(("+%d" % abs(rev - pos), self.hex(rev))) | |
125 | if 0 < rev < pos: |
|
125 | if 0 < rev < pos: | |
126 | navbefore.append(("-%d" % abs(rev - pos), self.hex(rev))) |
|
126 | navbefore.append(("-%d" % abs(rev - pos), self.hex(rev))) | |
127 |
|
127 | |||
128 |
|
128 | |||
129 | navafter.append(("tip", "tip")) |
|
129 | navafter.append(("tip", "tip")) | |
130 |
|
130 | |||
131 | data = lambda i: {"label": i[0], "node": i[1]} |
|
131 | data = lambda i: {"label": i[0], "node": i[1]} | |
132 | return ({'before': lambda **map: (data(i) for i in navbefore), |
|
132 | return ({'before': lambda **map: (data(i) for i in navbefore), | |
133 | 'after': lambda **map: (data(i) for i in navafter)},) |
|
133 | 'after': lambda **map: (data(i) for i in navafter)},) | |
134 |
|
134 | |||
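
The navigation labels produced by revnav.gen() come from the step sequence above: after an optional first step equal to the page length, it keeps emitting 1x and 3x of a step that grows by a factor of ten, which is what yields the +60, +100, +300, +1000 style links. A small self-contained sketch of that sequence; navsteps here is a simplified stand-in for _navseq and omits the 20-40 adjustment:

    import itertools

    def navsteps(step=1, firststep=None):
        # simplified stand-in for _navseq above; the 20-40 adjustment is omitted
        if firststep:
            yield firststep
            while step <= firststep:
                step *= 10
        while True:
            yield 1 * step
            yield 3 * step
            step *= 10

    print(list(itertools.islice(navsteps(1, 60), 8)))
    # [60, 100, 300, 1000, 3000, 10000, 30000, 100000]
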
135 | class filerevnav(revnav): |
|
135 | class filerevnav(revnav): | |
136 |
|
136 | |||
137 | def __init__(self, repo, path): |
|
137 | def __init__(self, repo, path): | |
138 | """Navigation generation object |
|
138 | """Navigation generation object | |
139 |
|
139 | |||
140 | :repo: repo object we generate nav for |
|
140 | :repo: repo object we generate nav for | |
141 | :path: path of the file we generate nav for |
|
141 | :path: path of the file we generate nav for | |
142 | """ |
|
142 | """ | |
143 | # used for iteration |
|
143 | # used for iteration | |
144 | self._changelog = repo.unfiltered().changelog |
|
144 | self._changelog = repo.unfiltered().changelog | |
145 | # used for hex generation |
|
145 | # used for hex generation | |
146 | self._revlog = repo.file(path) |
|
146 | self._revlog = repo.file(path) | |
147 |
|
147 | |||
148 | def hex(self, rev): |
|
148 | def hex(self, rev): | |
149 | return hex(self._changelog.node(self._revlog.linkrev(rev))) |
|
149 | return hex(self._changelog.node(self._revlog.linkrev(rev))) | |
150 |
|
150 | |||
151 | class _siblings(object): |
|
151 | class _siblings(object): | |
152 | def __init__(self, siblings=None, hiderev=None): |
|
152 | def __init__(self, siblings=None, hiderev=None): | |
153 | if siblings is None: |
|
153 | if siblings is None: | |
154 | siblings = [] |
|
154 | siblings = [] | |
155 | self.siblings = [s for s in siblings if s.node() != nullid] |
|
155 | self.siblings = [s for s in siblings if s.node() != nullid] | |
156 | if len(self.siblings) == 1 and self.siblings[0].rev() == hiderev: |
|
156 | if len(self.siblings) == 1 and self.siblings[0].rev() == hiderev: | |
157 | self.siblings = [] |
|
157 | self.siblings = [] | |
158 |
|
158 | |||
159 | def __iter__(self): |
|
159 | def __iter__(self): | |
160 | for s in self.siblings: |
|
160 | for s in self.siblings: | |
161 | d = { |
|
161 | d = { | |
162 | 'node': s.hex(), |
|
162 | 'node': s.hex(), | |
163 | 'rev': s.rev(), |
|
163 | 'rev': s.rev(), | |
164 | 'user': s.user(), |
|
164 | 'user': s.user(), | |
165 | 'date': s.date(), |
|
165 | 'date': s.date(), | |
166 | 'description': s.description(), |
|
166 | 'description': s.description(), | |
167 | 'branch': s.branch(), |
|
167 | 'branch': s.branch(), | |
168 | } |
|
168 | } | |
169 | if util.safehasattr(s, 'path'): |
|
169 | if util.safehasattr(s, 'path'): | |
170 | d['file'] = s.path() |
|
170 | d['file'] = s.path() | |
171 | yield d |
|
171 | yield d | |
172 |
|
172 | |||
173 | def __len__(self): |
|
173 | def __len__(self): | |
174 | return len(self.siblings) |
|
174 | return len(self.siblings) | |
175 |
|
175 | |||
176 | def difffeatureopts(req, ui, section): |
|
176 | def difffeatureopts(req, ui, section): | |
177 | diffopts = patch.difffeatureopts(ui, untrusted=True, |
|
177 | diffopts = patch.difffeatureopts(ui, untrusted=True, | |
178 | section=section, whitespace=True) |
|
178 | section=section, whitespace=True) | |
179 |
|
179 | |||
180 | for k in ('ignorews', 'ignorewsamount', 'ignorewseol', 'ignoreblanklines'): |
|
180 | for k in ('ignorews', 'ignorewsamount', 'ignorewseol', 'ignoreblanklines'): | |
181 | v = req.qsparams.get(k) |
|
181 | v = req.qsparams.get(k) | |
182 | if v is not None: |
|
182 | if v is not None: | |
183 | v = util.parsebool(v) |
|
183 | v = util.parsebool(v) | |
184 | setattr(diffopts, k, v if v is not None else True) |
|
184 | setattr(diffopts, k, v if v is not None else True) | |
185 |
|
185 | |||
186 | return diffopts |
|
186 | return diffopts | |
187 |
|
187 | |||
188 | def annotate(req, fctx, ui): |
|
188 | def annotate(req, fctx, ui): | |
189 | diffopts = difffeatureopts(req, ui, 'annotate') |
|
189 | diffopts = difffeatureopts(req, ui, 'annotate') | |
190 | return fctx.annotate(follow=True, linenumber=True, diffopts=diffopts) |
|
190 | return fctx.annotate(follow=True, linenumber=True, diffopts=diffopts) | |
191 |
|
191 | |||
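
difffeatureopts() above lets the four whitespace-related diff options be toggled per request through the query string; an unparsable or empty value falls back to True, so a bare flag still enables the option. A sketch of that handling, with a simplified stand-in for util.parsebool:

    def parsebool(s):
        # simplified stand-in for util.parsebool: None means "not a boolean"
        return {'1': True, 'yes': True, 'true': True, 'on': True,
                '0': False, 'no': False, 'false': False, 'off': False}.get(s.lower())

    def whitespaceopts(qsparams):
        opts = {}
        for k in ('ignorews', 'ignorewsamount', 'ignorewseol', 'ignoreblanklines'):
            v = qsparams.get(k)
            if v is not None:
                v = parsebool(v)
                opts[k] = v if v is not None else True
        return opts

    print(whitespaceopts({'ignorews': '1', 'ignoreblanklines': 'whatever'}))
    # {'ignorews': True, 'ignoreblanklines': True}
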
192 | def parents(ctx, hide=None): |
|
192 | def parents(ctx, hide=None): | |
193 | if isinstance(ctx, context.basefilectx): |
|
193 | if isinstance(ctx, context.basefilectx): | |
194 | introrev = ctx.introrev() |
|
194 | introrev = ctx.introrev() | |
195 | if ctx.changectx().rev() != introrev: |
|
195 | if ctx.changectx().rev() != introrev: | |
196 | return _siblings([ctx.repo()[introrev]], hide) |
|
196 | return _siblings([ctx.repo()[introrev]], hide) | |
197 | return _siblings(ctx.parents(), hide) |
|
197 | return _siblings(ctx.parents(), hide) | |
198 |
|
198 | |||
199 | def children(ctx, hide=None): |
|
199 | def children(ctx, hide=None): | |
200 | return _siblings(ctx.children(), hide) |
|
200 | return _siblings(ctx.children(), hide) | |
201 |
|
201 | |||
202 | def renamelink(fctx): |
|
202 | def renamelink(fctx): | |
203 | r = fctx.renamed() |
|
203 | r = fctx.renamed() | |
204 | if r: |
|
204 | if r: | |
205 | return [{'file': r[0], 'node': hex(r[1])}] |
|
205 | return [{'file': r[0], 'node': hex(r[1])}] | |
206 | return [] |
|
206 | return [] | |
207 |
|
207 | |||
208 | def nodetagsdict(repo, node): |
|
208 | def nodetagsdict(repo, node): | |
209 | return [{"name": i} for i in repo.nodetags(node)] |
|
209 | return [{"name": i} for i in repo.nodetags(node)] | |
210 |
|
210 | |||
211 | def nodebookmarksdict(repo, node): |
|
211 | def nodebookmarksdict(repo, node): | |
212 | return [{"name": i} for i in repo.nodebookmarks(node)] |
|
212 | return [{"name": i} for i in repo.nodebookmarks(node)] | |
213 |
|
213 | |||
214 | def nodebranchdict(repo, ctx): |
|
214 | def nodebranchdict(repo, ctx): | |
215 | branches = [] |
|
215 | branches = [] | |
216 | branch = ctx.branch() |
|
216 | branch = ctx.branch() | |
217 | # If this is an empty repo, ctx.node() == nullid, |
|
217 | # If this is an empty repo, ctx.node() == nullid, | |
218 | # ctx.branch() == 'default'. |
|
218 | # ctx.branch() == 'default'. | |
219 | try: |
|
219 | try: | |
220 | branchnode = repo.branchtip(branch) |
|
220 | branchnode = repo.branchtip(branch) | |
221 | except error.RepoLookupError: |
|
221 | except error.RepoLookupError: | |
222 | branchnode = None |
|
222 | branchnode = None | |
223 | if branchnode == ctx.node(): |
|
223 | if branchnode == ctx.node(): | |
224 | branches.append({"name": branch}) |
|
224 | branches.append({"name": branch}) | |
225 | return branches |
|
225 | return branches | |
226 |
|
226 | |||
227 | def nodeinbranch(repo, ctx): |
|
227 | def nodeinbranch(repo, ctx): | |
228 | branches = [] |
|
228 | branches = [] | |
229 | branch = ctx.branch() |
|
229 | branch = ctx.branch() | |
230 | try: |
|
230 | try: | |
231 | branchnode = repo.branchtip(branch) |
|
231 | branchnode = repo.branchtip(branch) | |
232 | except error.RepoLookupError: |
|
232 | except error.RepoLookupError: | |
233 | branchnode = None |
|
233 | branchnode = None | |
234 | if branch != 'default' and branchnode != ctx.node(): |
|
234 | if branch != 'default' and branchnode != ctx.node(): | |
235 | branches.append({"name": branch}) |
|
235 | branches.append({"name": branch}) | |
236 | return branches |
|
236 | return branches | |
237 |
|
237 | |||
238 | def nodebranchnodefault(ctx): |
|
238 | def nodebranchnodefault(ctx): | |
239 | branches = [] |
|
239 | branches = [] | |
240 | branch = ctx.branch() |
|
240 | branch = ctx.branch() | |
241 | if branch != 'default': |
|
241 | if branch != 'default': | |
242 | branches.append({"name": branch}) |
|
242 | branches.append({"name": branch}) | |
243 | return branches |
|
243 | return branches | |
244 |
|
244 | |||
245 | def showtag(repo, tmpl, t1, node=nullid, **args): |
|
245 | def showtag(repo, tmpl, t1, node=nullid, **args): | |
|
246 | args = pycompat.byteskwargs(args) | |||
246 | for t in repo.nodetags(node): |
|
247 | for t in repo.nodetags(node): | |
247 | yield tmpl(t1, tag=t, **args) |
|
248 | lm = args.copy() | |
|
249 | lm['tag'] = t | |||
|
250 | yield tmpl.generate(t1, lm) | |||
248 |
|
251 | |||
249 | def showbookmark(repo, tmpl, t1, node=nullid, **args): |
|
252 | def showbookmark(repo, tmpl, t1, node=nullid, **args): | |
|
253 | args = pycompat.byteskwargs(args) | |||
250 | for t in repo.nodebookmarks(node): |
|
254 | for t in repo.nodebookmarks(node): | |
251 | yield tmpl(t1, bookmark=t, **args) |
|
255 | lm = args.copy() | |
|
256 | lm['bookmark'] = t | |||
|
257 | yield tmpl.generate(t1, lm) | |||
252 |
|
258 | |||
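
The showtag()/showbookmark() hunks above show the shape of the API change running through this series: instead of expanding keyword arguments into the templater call, each caller now copies its mapping into an explicit dict and hands it to tmpl.generate(name, mapping), with pycompat.byteskwargs() normalising the **kwargs keys first. A toy templater (not Mercurial's real one) makes the new call shape concrete:

    class toytemplater(object):
        def generate(self, name, mapping):
            # the real templater renders the named template; this one just echoes
            return '%s -> %r' % (name, sorted(mapping.items()))

    def showtag_newstyle(tags, tmpl, t1, **args):
        for t in tags:
            lm = args.copy()          # same copy-and-extend pattern as above
            lm['tag'] = t
            yield tmpl.generate(t1, lm)

    for chunk in showtag_newstyle(['v1.0', 'tip'], toytemplater(), 'changesettag',
                                  parity=0):
        print(chunk)                  # one rendered chunk per tag
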
253 | def branchentries(repo, stripecount, limit=0): |
|
259 | def branchentries(repo, stripecount, limit=0): | |
254 | tips = [] |
|
260 | tips = [] | |
255 | heads = repo.heads() |
|
261 | heads = repo.heads() | |
256 | parity = paritygen(stripecount) |
|
262 | parity = paritygen(stripecount) | |
257 | sortkey = lambda item: (not item[1], item[0].rev()) |
|
263 | sortkey = lambda item: (not item[1], item[0].rev()) | |
258 |
|
264 | |||
259 | def entries(**map): |
|
265 | def entries(**map): | |
260 | count = 0 |
|
266 | count = 0 | |
261 | if not tips: |
|
267 | if not tips: | |
262 | for tag, hs, tip, closed in repo.branchmap().iterbranches(): |
|
268 | for tag, hs, tip, closed in repo.branchmap().iterbranches(): | |
263 | tips.append((repo[tip], closed)) |
|
269 | tips.append((repo[tip], closed)) | |
264 | for ctx, closed in sorted(tips, key=sortkey, reverse=True): |
|
270 | for ctx, closed in sorted(tips, key=sortkey, reverse=True): | |
265 | if limit > 0 and count >= limit: |
|
271 | if limit > 0 and count >= limit: | |
266 | return |
|
272 | return | |
267 | count += 1 |
|
273 | count += 1 | |
268 | if closed: |
|
274 | if closed: | |
269 | status = 'closed' |
|
275 | status = 'closed' | |
270 | elif ctx.node() not in heads: |
|
276 | elif ctx.node() not in heads: | |
271 | status = 'inactive' |
|
277 | status = 'inactive' | |
272 | else: |
|
278 | else: | |
273 | status = 'open' |
|
279 | status = 'open' | |
274 | yield { |
|
280 | yield { | |
275 | 'parity': next(parity), |
|
281 | 'parity': next(parity), | |
276 | 'branch': ctx.branch(), |
|
282 | 'branch': ctx.branch(), | |
277 | 'status': status, |
|
283 | 'status': status, | |
278 | 'node': ctx.hex(), |
|
284 | 'node': ctx.hex(), | |
279 | 'date': ctx.date() |
|
285 | 'date': ctx.date() | |
280 | } |
|
286 | } | |
281 |
|
287 | |||
282 | return entries |
|
288 | return entries | |
283 |
|
289 | |||
284 | def cleanpath(repo, path): |
|
290 | def cleanpath(repo, path): | |
285 | path = path.lstrip('/') |
|
291 | path = path.lstrip('/') | |
286 | return pathutil.canonpath(repo.root, '', path) |
|
292 | return pathutil.canonpath(repo.root, '', path) | |
287 |
|
293 | |||
288 | def changeidctx(repo, changeid): |
|
294 | def changeidctx(repo, changeid): | |
289 | try: |
|
295 | try: | |
290 | ctx = repo[changeid] |
|
296 | ctx = repo[changeid] | |
291 | except error.RepoError: |
|
297 | except error.RepoError: | |
292 | man = repo.manifestlog._revlog |
|
298 | man = repo.manifestlog._revlog | |
293 | ctx = repo[man.linkrev(man.rev(man.lookup(changeid)))] |
|
299 | ctx = repo[man.linkrev(man.rev(man.lookup(changeid)))] | |
294 |
|
300 | |||
295 | return ctx |
|
301 | return ctx | |
296 |
|
302 | |||
297 | def changectx(repo, req): |
|
303 | def changectx(repo, req): | |
298 | changeid = "tip" |
|
304 | changeid = "tip" | |
299 | if 'node' in req.qsparams: |
|
305 | if 'node' in req.qsparams: | |
300 | changeid = req.qsparams['node'] |
|
306 | changeid = req.qsparams['node'] | |
301 | ipos = changeid.find(':') |
|
307 | ipos = changeid.find(':') | |
302 | if ipos != -1: |
|
308 | if ipos != -1: | |
303 | changeid = changeid[(ipos + 1):] |
|
309 | changeid = changeid[(ipos + 1):] | |
304 | elif 'manifest' in req.qsparams: |
|
310 | elif 'manifest' in req.qsparams: | |
305 | changeid = req.qsparams['manifest'] |
|
311 | changeid = req.qsparams['manifest'] | |
306 |
|
312 | |||
307 | return changeidctx(repo, changeid) |
|
313 | return changeidctx(repo, changeid) | |
308 |
|
314 | |||
309 | def basechangectx(repo, req): |
|
315 | def basechangectx(repo, req): | |
310 | if 'node' in req.qsparams: |
|
316 | if 'node' in req.qsparams: | |
311 | changeid = req.qsparams['node'] |
|
317 | changeid = req.qsparams['node'] | |
312 | ipos = changeid.find(':') |
|
318 | ipos = changeid.find(':') | |
313 | if ipos != -1: |
|
319 | if ipos != -1: | |
314 | changeid = changeid[:ipos] |
|
320 | changeid = changeid[:ipos] | |
315 | return changeidctx(repo, changeid) |
|
321 | return changeidctx(repo, changeid) | |
316 |
|
322 | |||
317 | return None |
|
323 | return None | |
318 |
|
324 | |||
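
changectx() and basechangectx() above split a single `node` query value of the form `basenode:node`: the part after the first colon names the changeset to render, the part before it names the comparison base (falling back to tip and to the first parent respectively when absent). A small sketch of that split:

    def splitnodeparam(value):
        # everything after the first ':' selects the changeset to show,
        # everything before it selects the comparison base (may be absent)
        ipos = value.find(':')
        if ipos == -1:
            return None, value
        return value[:ipos], value[ipos + 1:]

    print(splitnodeparam('abc123'))          # (None, 'abc123')
    print(splitnodeparam('abc123:def456'))   # ('abc123', 'def456')
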
319 | def filectx(repo, req): |
|
325 | def filectx(repo, req): | |
320 | if 'file' not in req.qsparams: |
|
326 | if 'file' not in req.qsparams: | |
321 | raise ErrorResponse(HTTP_NOT_FOUND, 'file not given') |
|
327 | raise ErrorResponse(HTTP_NOT_FOUND, 'file not given') | |
322 | path = cleanpath(repo, req.qsparams['file']) |
|
328 | path = cleanpath(repo, req.qsparams['file']) | |
323 | if 'node' in req.qsparams: |
|
329 | if 'node' in req.qsparams: | |
324 | changeid = req.qsparams['node'] |
|
330 | changeid = req.qsparams['node'] | |
325 | elif 'filenode' in req.qsparams: |
|
331 | elif 'filenode' in req.qsparams: | |
326 | changeid = req.qsparams['filenode'] |
|
332 | changeid = req.qsparams['filenode'] | |
327 | else: |
|
333 | else: | |
328 | raise ErrorResponse(HTTP_NOT_FOUND, 'node or filenode not given') |
|
334 | raise ErrorResponse(HTTP_NOT_FOUND, 'node or filenode not given') | |
329 | try: |
|
335 | try: | |
330 | fctx = repo[changeid][path] |
|
336 | fctx = repo[changeid][path] | |
331 | except error.RepoError: |
|
337 | except error.RepoError: | |
332 | fctx = repo.filectx(path, fileid=changeid) |
|
338 | fctx = repo.filectx(path, fileid=changeid) | |
333 |
|
339 | |||
334 | return fctx |
|
340 | return fctx | |
335 |
|
341 | |||
336 | def linerange(req): |
|
342 | def linerange(req): | |
337 | linerange = req.qsparams.getall('linerange') |
|
343 | linerange = req.qsparams.getall('linerange') | |
338 | if not linerange: |
|
344 | if not linerange: | |
339 | return None |
|
345 | return None | |
340 | if len(linerange) > 1: |
|
346 | if len(linerange) > 1: | |
341 | raise ErrorResponse(HTTP_BAD_REQUEST, |
|
347 | raise ErrorResponse(HTTP_BAD_REQUEST, | |
342 | 'redundant linerange parameter') |
|
348 | 'redundant linerange parameter') | |
343 | try: |
|
349 | try: | |
344 | fromline, toline = map(int, linerange[0].split(':', 1)) |
|
350 | fromline, toline = map(int, linerange[0].split(':', 1)) | |
345 | except ValueError: |
|
351 | except ValueError: | |
346 | raise ErrorResponse(HTTP_BAD_REQUEST, |
|
352 | raise ErrorResponse(HTTP_BAD_REQUEST, | |
347 | 'invalid linerange parameter') |
|
353 | 'invalid linerange parameter') | |
348 | try: |
|
354 | try: | |
349 | return util.processlinerange(fromline, toline) |
|
355 | return util.processlinerange(fromline, toline) | |
350 | except error.ParseError as exc: |
|
356 | except error.ParseError as exc: | |
351 | raise ErrorResponse(HTTP_BAD_REQUEST, pycompat.bytestr(exc)) |
|
357 | raise ErrorResponse(HTTP_BAD_REQUEST, pycompat.bytestr(exc)) | |
352 |
|
358 | |||
353 | def formatlinerange(fromline, toline): |
|
359 | def formatlinerange(fromline, toline): | |
354 | return '%d:%d' % (fromline + 1, toline) |
|
360 | return '%d:%d' % (fromline + 1, toline) | |
355 |
|
361 | |||
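
linerange() above accepts a single `linerange=fromline:toline` query parameter, rejects duplicates and malformed values, and converts the pair into the 0-based, half-open form used internally; formatlinerange() is its inverse for display. A sketch of the conversion, with processlinerange written out as a stand-in for util.processlinerange:

    def processlinerange(fromline, toline):
        # simplified stand-in for util.processlinerange: validate and return the
        # 0-based, half-open equivalent of a 1-based inclusive "from:to" pair
        if toline - fromline < 0:
            raise ValueError('line range must be positive')
        if fromline < 1:
            raise ValueError('fromline must be strictly positive')
        return fromline - 1, toline

    def parselinerange(value):
        fromline, toline = map(int, value.split(':', 1))
        return processlinerange(fromline, toline)

    print(parselinerange('5:15'))    # (4, 15); formatlinerange() maps it back
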
356 | def succsandmarkers(context, mapping): |
|
362 | def succsandmarkers(context, mapping): | |
357 | repo = context.resource(mapping, 'repo') |
|
363 | repo = context.resource(mapping, 'repo') | |
358 | for item in templatekw.showsuccsandmarkers(context, mapping): |
|
364 | for item in templatekw.showsuccsandmarkers(context, mapping): | |
359 | item['successors'] = _siblings(repo[successor] |
|
365 | item['successors'] = _siblings(repo[successor] | |
360 | for successor in item['successors']) |
|
366 | for successor in item['successors']) | |
361 | yield item |
|
367 | yield item | |
362 |
|
368 | |||
363 | # teach templater succsandmarkers is switched to (context, mapping) API |
|
369 | # teach templater succsandmarkers is switched to (context, mapping) API | |
364 | succsandmarkers._requires = {'repo', 'ctx', 'templ'} |
|
370 | succsandmarkers._requires = {'repo', 'ctx', 'templ'} | |
365 |
|
371 | |||
366 | def whyunstable(context, mapping): |
|
372 | def whyunstable(context, mapping): | |
367 | repo = context.resource(mapping, 'repo') |
|
373 | repo = context.resource(mapping, 'repo') | |
368 | ctx = context.resource(mapping, 'ctx') |
|
374 | ctx = context.resource(mapping, 'ctx') | |
369 |
|
375 | |||
370 | entries = obsutil.whyunstable(repo, ctx) |
|
376 | entries = obsutil.whyunstable(repo, ctx) | |
371 | for entry in entries: |
|
377 | for entry in entries: | |
372 | if entry.get('divergentnodes'): |
|
378 | if entry.get('divergentnodes'): | |
373 | entry['divergentnodes'] = _siblings(entry['divergentnodes']) |
|
379 | entry['divergentnodes'] = _siblings(entry['divergentnodes']) | |
374 | yield entry |
|
380 | yield entry | |
375 |
|
381 | |||
376 | whyunstable._requires = {'repo', 'ctx', 'templ'} |
|
382 | whyunstable._requires = {'repo', 'ctx', 'templ'} | |
377 |
|
383 | |||
378 | def commonentry(repo, ctx): |
|
384 | def commonentry(repo, ctx): | |
379 | node = ctx.node() |
|
385 | node = ctx.node() | |
380 | return { |
|
386 | return { | |
381 | # TODO: perhaps ctx.changectx() should be assigned if ctx is a |
|
387 | # TODO: perhaps ctx.changectx() should be assigned if ctx is a | |
382 | # filectx, but I'm not pretty sure if that would always work because |
|
388 | # filectx, but I'm not pretty sure if that would always work because | |
383 | # fctx.parents() != fctx.changectx.parents() for example. |
|
389 | # fctx.parents() != fctx.changectx.parents() for example. | |
384 | 'ctx': ctx, |
|
390 | 'ctx': ctx, | |
385 | 'revcache': {}, |
|
391 | 'revcache': {}, | |
386 | 'rev': ctx.rev(), |
|
392 | 'rev': ctx.rev(), | |
387 | 'node': hex(node), |
|
393 | 'node': hex(node), | |
388 | 'author': ctx.user(), |
|
394 | 'author': ctx.user(), | |
389 | 'desc': ctx.description(), |
|
395 | 'desc': ctx.description(), | |
390 | 'date': ctx.date(), |
|
396 | 'date': ctx.date(), | |
391 | 'extra': ctx.extra(), |
|
397 | 'extra': ctx.extra(), | |
392 | 'phase': ctx.phasestr(), |
|
398 | 'phase': ctx.phasestr(), | |
393 | 'obsolete': ctx.obsolete(), |
|
399 | 'obsolete': ctx.obsolete(), | |
394 | 'succsandmarkers': succsandmarkers, |
|
400 | 'succsandmarkers': succsandmarkers, | |
395 | 'instabilities': [{"instability": i} for i in ctx.instabilities()], |
|
401 | 'instabilities': [{"instability": i} for i in ctx.instabilities()], | |
396 | 'whyunstable': whyunstable, |
|
402 | 'whyunstable': whyunstable, | |
397 | 'branch': nodebranchnodefault(ctx), |
|
403 | 'branch': nodebranchnodefault(ctx), | |
398 | 'inbranch': nodeinbranch(repo, ctx), |
|
404 | 'inbranch': nodeinbranch(repo, ctx), | |
399 | 'branches': nodebranchdict(repo, ctx), |
|
405 | 'branches': nodebranchdict(repo, ctx), | |
400 | 'tags': nodetagsdict(repo, node), |
|
406 | 'tags': nodetagsdict(repo, node), | |
401 | 'bookmarks': nodebookmarksdict(repo, node), |
|
407 | 'bookmarks': nodebookmarksdict(repo, node), | |
402 | 'parent': lambda **x: parents(ctx), |
|
408 | 'parent': lambda **x: parents(ctx), | |
403 | 'child': lambda **x: children(ctx), |
|
409 | 'child': lambda **x: children(ctx), | |
404 | } |
|
410 | } | |
405 |
|
411 | |||
406 | def changelistentry(web, ctx): |
|
412 | def changelistentry(web, ctx): | |
407 | '''Obtain a dictionary to be used for entries in a changelist. |
|
413 | '''Obtain a dictionary to be used for entries in a changelist. | |
408 |
|
414 | |||
409 | This function is called when producing items for the "entries" list passed |
|
415 | This function is called when producing items for the "entries" list passed | |
410 | to the "shortlog" and "changelog" templates. |
|
416 | to the "shortlog" and "changelog" templates. | |
411 | ''' |
|
417 | ''' | |
412 | repo = web.repo |
|
418 | repo = web.repo | |
413 | rev = ctx.rev() |
|
419 | rev = ctx.rev() | |
414 | n = ctx.node() |
|
420 | n = ctx.node() | |
415 | showtags = showtag(repo, web.tmpl, 'changelogtag', n) |
|
421 | showtags = showtag(repo, web.tmpl, 'changelogtag', n) | |
416 | files = listfilediffs(web.tmpl, ctx.files(), n, web.maxfiles) |
|
422 | files = listfilediffs(web.tmpl, ctx.files(), n, web.maxfiles) | |
417 |
|
423 | |||
418 | entry = commonentry(repo, ctx) |
|
424 | entry = commonentry(repo, ctx) | |
419 | entry.update( |
|
425 | entry.update( | |
420 | allparents=lambda **x: parents(ctx), |
|
426 | allparents=lambda **x: parents(ctx), | |
421 | parent=lambda **x: parents(ctx, rev - 1), |
|
427 | parent=lambda **x: parents(ctx, rev - 1), | |
422 | child=lambda **x: children(ctx, rev + 1), |
|
428 | child=lambda **x: children(ctx, rev + 1), | |
423 | changelogtag=showtags, |
|
429 | changelogtag=showtags, | |
424 | files=files, |
|
430 | files=files, | |
425 | ) |
|
431 | ) | |
426 | return entry |
|
432 | return entry | |
427 |
|
433 | |||
428 | def symrevorshortnode(req, ctx): |
|
434 | def symrevorshortnode(req, ctx): | |
429 | if 'node' in req.qsparams: |
|
435 | if 'node' in req.qsparams: | |
430 | return templatefilters.revescape(req.qsparams['node']) |
|
436 | return templatefilters.revescape(req.qsparams['node']) | |
431 | else: |
|
437 | else: | |
432 | return short(ctx.node()) |
|
438 | return short(ctx.node()) | |
433 |
|
439 | |||
434 | def changesetentry(web, ctx): |
|
440 | def changesetentry(web, ctx): | |
435 | '''Obtain a dictionary to be used to render the "changeset" template.''' |
|
441 | '''Obtain a dictionary to be used to render the "changeset" template.''' | |
436 |
|
442 | |||
437 | showtags = showtag(web.repo, web.tmpl, 'changesettag', ctx.node()) |
|
443 | showtags = showtag(web.repo, web.tmpl, 'changesettag', ctx.node()) | |
438 | showbookmarks = showbookmark(web.repo, web.tmpl, 'changesetbookmark', |
|
444 | showbookmarks = showbookmark(web.repo, web.tmpl, 'changesetbookmark', | |
439 | ctx.node()) |
|
445 | ctx.node()) | |
440 | showbranch = nodebranchnodefault(ctx) |
|
446 | showbranch = nodebranchnodefault(ctx) | |
441 |
|
447 | |||
442 | files = [] |
|
448 | files = [] | |
443 | parity = paritygen(web.stripecount) |
|
449 | parity = paritygen(web.stripecount) | |
444 | for blockno, f in enumerate(ctx.files()): |
|
450 | for blockno, f in enumerate(ctx.files()): | |
445 | template = 'filenodelink' if f in ctx else 'filenolink' |
|
451 | template = 'filenodelink' if f in ctx else 'filenolink' | |
446 | files.append(web.tmpl(template, |
|
452 | files.append(web.tmpl.generate(template, { | |
447 | node=ctx.hex(), file=f, blockno=blockno + 1, |
|
453 | 'node': ctx.hex(), | |
448 | parity=next(parity))) |
|
454 | 'file': f, | |
|
455 | 'blockno': blockno + 1, | |||
|
456 | 'parity': next(parity), | |||
|
457 | })) | |||
449 |
|
458 | |||
450 | basectx = basechangectx(web.repo, web.req) |
|
459 | basectx = basechangectx(web.repo, web.req) | |
451 | if basectx is None: |
|
460 | if basectx is None: | |
452 | basectx = ctx.p1() |
|
461 | basectx = ctx.p1() | |
453 |
|
462 | |||
454 | style = web.config('web', 'style') |
|
463 | style = web.config('web', 'style') | |
455 | if 'style' in web.req.qsparams: |
|
464 | if 'style' in web.req.qsparams: | |
456 | style = web.req.qsparams['style'] |
|
465 | style = web.req.qsparams['style'] | |
457 |
|
466 | |||
458 | diff = diffs(web, ctx, basectx, None, style) |
|
467 | diff = diffs(web, ctx, basectx, None, style) | |
459 |
|
468 | |||
460 | parity = paritygen(web.stripecount) |
|
469 | parity = paritygen(web.stripecount) | |
461 | diffstatsgen = diffstatgen(ctx, basectx) |
|
470 | diffstatsgen = diffstatgen(ctx, basectx) | |
462 | diffstats = diffstat(web.tmpl, ctx, diffstatsgen, parity) |
|
471 | diffstats = diffstat(web.tmpl, ctx, diffstatsgen, parity) | |
463 |
|
472 | |||
464 | return dict( |
|
473 | return dict( | |
465 | diff=diff, |
|
474 | diff=diff, | |
466 | symrev=symrevorshortnode(web.req, ctx), |
|
475 | symrev=symrevorshortnode(web.req, ctx), | |
467 | basenode=basectx.hex(), |
|
476 | basenode=basectx.hex(), | |
468 | changesettag=showtags, |
|
477 | changesettag=showtags, | |
469 | changesetbookmark=showbookmarks, |
|
478 | changesetbookmark=showbookmarks, | |
470 | changesetbranch=showbranch, |
|
479 | changesetbranch=showbranch, | |
471 | files=files, |
|
480 | files=files, | |
472 | diffsummary=lambda **x: diffsummary(diffstatsgen), |
|
481 | diffsummary=lambda **x: diffsummary(diffstatsgen), | |
473 | diffstat=diffstats, |
|
482 | diffstat=diffstats, | |
474 | archives=web.archivelist(ctx.hex()), |
|
483 | archives=web.archivelist(ctx.hex()), | |
475 | **pycompat.strkwargs(commonentry(web.repo, ctx))) |
|
484 | **pycompat.strkwargs(commonentry(web.repo, ctx))) | |
476 |
|
485 | |||
477 | def listfilediffs(tmpl, files, node, max): |
|
486 | def listfilediffs(tmpl, files, node, max): | |
478 | for f in files[:max]: |
|
487 | for f in files[:max]: | |
479 | yield tmpl('filedifflink', node=hex(node), file=f) |
|
488 | yield tmpl.generate('filedifflink', {'node': hex(node), 'file': f}) | |
480 | if len(files) > max: |
|
489 | if len(files) > max: | |
481 | yield tmpl('fileellipses') |
|
490 | yield tmpl.generate('fileellipses', {}) | |
482 |
|
491 | |||
483 | def diffs(web, ctx, basectx, files, style, linerange=None, |
|
492 | def diffs(web, ctx, basectx, files, style, linerange=None, | |
484 | lineidprefix=''): |
|
493 | lineidprefix=''): | |
485 |
|
494 | |||
486 | def prettyprintlines(lines, blockno): |
|
495 | def prettyprintlines(lines, blockno): | |
487 | for lineno, l in enumerate(lines, 1): |
|
496 | for lineno, l in enumerate(lines, 1): | |
488 | difflineno = "%d.%d" % (blockno, lineno) |
|
497 | difflineno = "%d.%d" % (blockno, lineno) | |
489 | if l.startswith('+'): |
|
498 | if l.startswith('+'): | |
490 | ltype = "difflineplus" |
|
499 | ltype = "difflineplus" | |
491 | elif l.startswith('-'): |
|
500 | elif l.startswith('-'): | |
492 | ltype = "difflineminus" |
|
501 | ltype = "difflineminus" | |
493 | elif l.startswith('@'): |
|
502 | elif l.startswith('@'): | |
494 | ltype = "difflineat" |
|
503 | ltype = "difflineat" | |
495 | else: |
|
504 | else: | |
496 | ltype = "diffline" |
|
505 | ltype = "diffline" | |
497 | yield web.tmpl( |
|
506 | yield web.tmpl.generate(ltype, { | |
498 | ltype, |
|
507 | 'line': l, | |
499 | line=l, |
|
508 | 'lineno': lineno, | |
500 | lineno=lineno, |
|
509 | 'lineid': lineidprefix + "l%s" % difflineno, | |
501 | lineid=lineidprefix + "l%s" % difflineno, |
|
510 | 'linenumber': "% 8s" % difflineno, | |
502 | linenumber="% 8s" % difflineno) |
|
511 | }) | |
503 |
|
512 | |||
504 | repo = web.repo |
|
513 | repo = web.repo | |
505 | if files: |
|
514 | if files: | |
506 | m = match.exact(repo.root, repo.getcwd(), files) |
|
515 | m = match.exact(repo.root, repo.getcwd(), files) | |
507 | else: |
|
516 | else: | |
508 | m = match.always(repo.root, repo.getcwd()) |
|
517 | m = match.always(repo.root, repo.getcwd()) | |
509 |
|
518 | |||
510 | diffopts = patch.diffopts(repo.ui, untrusted=True) |
|
519 | diffopts = patch.diffopts(repo.ui, untrusted=True) | |
511 | node1 = basectx.node() |
|
520 | node1 = basectx.node() | |
512 | node2 = ctx.node() |
|
521 | node2 = ctx.node() | |
513 | parity = paritygen(web.stripecount) |
|
522 | parity = paritygen(web.stripecount) | |
514 |
|
523 | |||
515 | diffhunks = patch.diffhunks(repo, node1, node2, m, opts=diffopts) |
|
524 | diffhunks = patch.diffhunks(repo, node1, node2, m, opts=diffopts) | |
516 | for blockno, (fctx1, fctx2, header, hunks) in enumerate(diffhunks, 1): |
|
525 | for blockno, (fctx1, fctx2, header, hunks) in enumerate(diffhunks, 1): | |
517 | if style != 'raw': |
|
526 | if style != 'raw': | |
518 | header = header[1:] |
|
527 | header = header[1:] | |
519 | lines = [h + '\n' for h in header] |
|
528 | lines = [h + '\n' for h in header] | |
520 | for hunkrange, hunklines in hunks: |
|
529 | for hunkrange, hunklines in hunks: | |
521 | if linerange is not None and hunkrange is not None: |
|
530 | if linerange is not None and hunkrange is not None: | |
522 | s1, l1, s2, l2 = hunkrange |
|
531 | s1, l1, s2, l2 = hunkrange | |
523 | if not mdiff.hunkinrange((s2, l2), linerange): |
|
532 | if not mdiff.hunkinrange((s2, l2), linerange): | |
524 | continue |
|
533 | continue | |
525 | lines.extend(hunklines) |
|
534 | lines.extend(hunklines) | |
526 | if lines: |
|
535 | if lines: | |
527 | yield web.tmpl('diffblock', parity=next(parity), blockno=blockno, |
|
536 | yield web.tmpl.generate('diffblock', { | |
528 | lines=prettyprintlines(lines, blockno)) |
|
537 | 'parity': next(parity), | |
|
538 | 'blockno': blockno, | |||
|
539 | 'lines': prettyprintlines(lines, blockno), | |||
|
540 | }) | |||
529 |
|
541 | |||
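
When a line range is requested, diffs() above keeps only the hunks whose new-file span overlaps that range. A stand-in for mdiff.hunkinrange showing the overlap test, assuming the same (start, length) / (lowerbound, upperbound) convention as the call site:

    def hunkinrange(hunk, linerange):
        # keep a hunk starting at `start` with `length` new-file lines only if
        # it overlaps the requested [lowerbound, upperbound) range
        (start, length), (lowerbound, upperbound) = hunk, linerange
        return lowerbound < start + length and start < upperbound

    print(hunkinrange((10, 5), (0, 8)))    # False: lines 10-14 miss range 0-8
    print(hunkinrange((10, 5), (12, 30)))  # True: lines 10-14 overlap 12-30
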
530 | def compare(tmpl, context, leftlines, rightlines): |
|
542 | def compare(tmpl, context, leftlines, rightlines): | |
531 | '''Generator function that provides side-by-side comparison data.''' |
|
543 | '''Generator function that provides side-by-side comparison data.''' | |
532 |
|
544 | |||
533 | def compline(type, leftlineno, leftline, rightlineno, rightline): |
|
545 | def compline(type, leftlineno, leftline, rightlineno, rightline): | |
534 | lineid = leftlineno and ("l%d" % leftlineno) or '' |
|
546 | lineid = leftlineno and ("l%d" % leftlineno) or '' | |
535 | lineid += rightlineno and ("r%d" % rightlineno) or '' |
|
547 | lineid += rightlineno and ("r%d" % rightlineno) or '' | |
536 | llno = '%d' % leftlineno if leftlineno else '' |
|
548 | llno = '%d' % leftlineno if leftlineno else '' | |
537 | rlno = '%d' % rightlineno if rightlineno else '' |
|
549 | rlno = '%d' % rightlineno if rightlineno else '' | |
538 | return tmpl('comparisonline', |
|
550 | return tmpl.generate('comparisonline', { | |
539 | type=type, |
|
551 | 'type': type, | |
540 | lineid=lineid, |
|
552 | 'lineid': lineid, | |
541 | leftlineno=leftlineno, |
|
553 | 'leftlineno': leftlineno, | |
542 | leftlinenumber="% 6s" % llno, |
|
554 | 'leftlinenumber': "% 6s" % llno, | |
543 | leftline=leftline or '', |
|
555 | 'leftline': leftline or '', | |
544 | rightlineno=rightlineno, |
|
556 | 'rightlineno': rightlineno, | |
545 | rightlinenumber="% 6s" % rlno, |
|
557 | 'rightlinenumber': "% 6s" % rlno, | |
546 | rightline=rightline or '') |
|
558 | 'rightline': rightline or '', | |
|
559 | }) | |||
547 |
|
560 | |||
548 | def getblock(opcodes): |
|
561 | def getblock(opcodes): | |
549 | for type, llo, lhi, rlo, rhi in opcodes: |
|
562 | for type, llo, lhi, rlo, rhi in opcodes: | |
550 | len1 = lhi - llo |
|
563 | len1 = lhi - llo | |
551 | len2 = rhi - rlo |
|
564 | len2 = rhi - rlo | |
552 | count = min(len1, len2) |
|
565 | count = min(len1, len2) | |
553 | for i in xrange(count): |
|
566 | for i in xrange(count): | |
554 | yield compline(type=type, |
|
567 | yield compline(type=type, | |
555 | leftlineno=llo + i + 1, |
|
568 | leftlineno=llo + i + 1, | |
556 | leftline=leftlines[llo + i], |
|
569 | leftline=leftlines[llo + i], | |
557 | rightlineno=rlo + i + 1, |
|
570 | rightlineno=rlo + i + 1, | |
558 | rightline=rightlines[rlo + i]) |
|
571 | rightline=rightlines[rlo + i]) | |
559 | if len1 > len2: |
|
572 | if len1 > len2: | |
560 | for i in xrange(llo + count, lhi): |
|
573 | for i in xrange(llo + count, lhi): | |
561 | yield compline(type=type, |
|
574 | yield compline(type=type, | |
562 | leftlineno=i + 1, |
|
575 | leftlineno=i + 1, | |
563 | leftline=leftlines[i], |
|
576 | leftline=leftlines[i], | |
564 | rightlineno=None, |
|
577 | rightlineno=None, | |
565 | rightline=None) |
|
578 | rightline=None) | |
566 | elif len2 > len1: |
|
579 | elif len2 > len1: | |
567 | for i in xrange(rlo + count, rhi): |
|
580 | for i in xrange(rlo + count, rhi): | |
568 | yield compline(type=type, |
|
581 | yield compline(type=type, | |
569 | leftlineno=None, |
|
582 | leftlineno=None, | |
570 | leftline=None, |
|
583 | leftline=None, | |
571 | rightlineno=i + 1, |
|
584 | rightlineno=i + 1, | |
572 | rightline=rightlines[i]) |
|
585 | rightline=rightlines[i]) | |
573 |
|
586 | |||
574 | s = difflib.SequenceMatcher(None, leftlines, rightlines) |
|
587 | s = difflib.SequenceMatcher(None, leftlines, rightlines) | |
575 | if context < 0: |
|
588 | if context < 0: | |
576 | yield tmpl('comparisonblock', lines=getblock(s.get_opcodes())) |
|
589 | yield tmpl.generate('comparisonblock', | |
|
590 | {'lines': getblock(s.get_opcodes())}) | |||
577 | else: |
|
591 | else: | |
578 | for oc in s.get_grouped_opcodes(n=context): |
|
592 | for oc in s.get_grouped_opcodes(n=context): | |
579 | yield tmpl('comparisonblock', lines=getblock(oc)) |
|
593 | yield tmpl.generate('comparisonblock', {'lines': getblock(oc)}) | |
580 |
|
594 | |||
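
compare() above builds the side-by-side view directly from difflib opcodes: equal runs are emitted pairwise, and the tail of a longer replace, insert, or delete block pads the shorter side with blank cells. A quick standalone look at the opcodes it iterates over:

    import difflib

    left = ['a', 'b', 'c']
    right = ['a', 'B', 'c', 'd']
    s = difflib.SequenceMatcher(None, left, right)
    for tag, llo, lhi, rlo, rhi in s.get_opcodes():
        print(tag, left[llo:lhi], right[rlo:rhi])
    # equal ['a'] ['a']
    # replace ['b'] ['B']
    # equal ['c'] ['c']
    # insert [] ['d']
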
581 | def diffstatgen(ctx, basectx): |
|
595 | def diffstatgen(ctx, basectx): | |
582 | '''Generator function that provides the diffstat data.''' |
|
596 | '''Generator function that provides the diffstat data.''' | |
583 |
|
597 | |||
584 | stats = patch.diffstatdata( |
|
598 | stats = patch.diffstatdata( | |
585 | util.iterlines(ctx.diff(basectx, noprefix=False))) |
|
599 | util.iterlines(ctx.diff(basectx, noprefix=False))) | |
586 | maxname, maxtotal, addtotal, removetotal, binary = patch.diffstatsum(stats) |
|
600 | maxname, maxtotal, addtotal, removetotal, binary = patch.diffstatsum(stats) | |
587 | while True: |
|
601 | while True: | |
588 | yield stats, maxname, maxtotal, addtotal, removetotal, binary |
|
602 | yield stats, maxname, maxtotal, addtotal, removetotal, binary | |
589 |
|
603 | |||
590 | def diffsummary(statgen): |
|
604 | def diffsummary(statgen): | |
591 | '''Return a short summary of the diff.''' |
|
605 | '''Return a short summary of the diff.''' | |
592 |
|
606 | |||
593 | stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen) |
|
607 | stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen) | |
594 | return _(' %d files changed, %d insertions(+), %d deletions(-)\n') % ( |
|
608 | return _(' %d files changed, %d insertions(+), %d deletions(-)\n') % ( | |
595 | len(stats), addtotal, removetotal) |
|
609 | len(stats), addtotal, removetotal) | |
596 |
|
610 | |||
597 | def diffstat(tmpl, ctx, statgen, parity): |
|
611 | def diffstat(tmpl, ctx, statgen, parity): | |
598 | '''Return a diffstat template for each file in the diff.''' |
|
612 | '''Return a diffstat template for each file in the diff.''' | |
599 |
|
613 | |||
600 | stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen) |
|
614 | stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen) | |
601 | files = ctx.files() |
|
615 | files = ctx.files() | |
602 |
|
616 | |||
603 | def pct(i): |
|
617 | def pct(i): | |
604 | if maxtotal == 0: |
|
618 | if maxtotal == 0: | |
605 | return 0 |
|
619 | return 0 | |
606 | return (float(i) / maxtotal) * 100 |
|
620 | return (float(i) / maxtotal) * 100 | |
607 |
|
621 | |||
608 | fileno = 0 |
|
622 | fileno = 0 | |
609 | for filename, adds, removes, isbinary in stats: |
|
623 | for filename, adds, removes, isbinary in stats: | |
610 | template = 'diffstatlink' if filename in files else 'diffstatnolink' |
|
624 | template = 'diffstatlink' if filename in files else 'diffstatnolink' | |
611 | total = adds + removes |
|
625 | total = adds + removes | |
612 | fileno += 1 |
|
626 | fileno += 1 | |
613 | yield tmpl(template, node=ctx.hex(), file=filename, fileno=fileno, |
|
627 | yield tmpl.generate(template, { | |
614 | total=total, addpct=pct(adds), removepct=pct(removes), |
|
628 | 'node': ctx.hex(), | |
615 | parity=next(parity)) |
|
629 | 'file': filename, | |
|
630 | 'fileno': fileno, | |||
|
631 | 'total': total, | |||
|
632 | 'addpct': pct(adds), | |||
|
633 | 'removepct': pct(removes), | |||
|
634 | 'parity': next(parity), | |||
|
635 | }) | |||
616 |
|
636 | |||
617 | class sessionvars(object): |
|
637 | class sessionvars(object): | |
618 | def __init__(self, vars, start='?'): |
|
638 | def __init__(self, vars, start='?'): | |
619 | self.start = start |
|
639 | self.start = start | |
620 | self.vars = vars |
|
640 | self.vars = vars | |
621 | def __getitem__(self, key): |
|
641 | def __getitem__(self, key): | |
622 | return self.vars[key] |
|
642 | return self.vars[key] | |
623 | def __setitem__(self, key, value): |
|
643 | def __setitem__(self, key, value): | |
624 | self.vars[key] = value |
|
644 | self.vars[key] = value | |
625 | def __copy__(self): |
|
645 | def __copy__(self): | |
626 | return sessionvars(copy.copy(self.vars), self.start) |
|
646 | return sessionvars(copy.copy(self.vars), self.start) | |
627 | def __iter__(self): |
|
647 | def __iter__(self): | |
628 | separator = self.start |
|
648 | separator = self.start | |
629 | for key, value in sorted(self.vars.iteritems()): |
|
649 | for key, value in sorted(self.vars.iteritems()): | |
630 | yield {'name': key, |
|
650 | yield {'name': key, | |
631 | 'value': pycompat.bytestr(value), |
|
651 | 'value': pycompat.bytestr(value), | |
632 | 'separator': separator, |
|
652 | 'separator': separator, | |
633 | } |
|
653 | } | |
634 | separator = '&' |
|
654 | separator = '&' | |
635 |
|
655 | |||
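
sessionvars above exists so templates can append the current query settings to generated URLs: the first emitted variable carries the configured start character (usually '?') and every later one carries '&'. A sketch of that separator handling outside the class:

    def urlvars(vars, start='?'):
        # mirrors sessionvars.__iter__ above, using plain dict iteration
        separator = start
        for key, value in sorted(vars.items()):
            yield {'name': key, 'value': str(value), 'separator': separator}
            separator = '&'

    print(''.join('%(separator)s%(name)s=%(value)s' % v
                  for v in urlvars({'style': 'paper', 'rev': 'tip'})))
    # ?rev=tip&style=paper
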
636 | class wsgiui(uimod.ui): |
|
656 | class wsgiui(uimod.ui): | |
637 | # default termwidth breaks under mod_wsgi |
|
657 | # default termwidth breaks under mod_wsgi | |
638 | def termwidth(self): |
|
658 | def termwidth(self): | |
639 | return 80 |
|
659 | return 80 | |
640 |
|
660 | |||
641 | def getwebsubs(repo): |
|
661 | def getwebsubs(repo): | |
642 | websubtable = [] |
|
662 | websubtable = [] | |
643 | websubdefs = repo.ui.configitems('websub') |
|
663 | websubdefs = repo.ui.configitems('websub') | |
644 | # we must maintain interhg backwards compatibility |
|
664 | # we must maintain interhg backwards compatibility | |
645 | websubdefs += repo.ui.configitems('interhg') |
|
665 | websubdefs += repo.ui.configitems('interhg') | |
646 | for key, pattern in websubdefs: |
|
666 | for key, pattern in websubdefs: | |
647 | # grab the delimiter from the character after the "s" |
|
667 | # grab the delimiter from the character after the "s" | |
648 | unesc = pattern[1:2] |
|
668 | unesc = pattern[1:2] | |
649 | delim = re.escape(unesc) |
|
669 | delim = re.escape(unesc) | |
650 |
|
670 | |||
651 | # identify portions of the pattern, taking care to avoid escaped |
|
671 | # identify portions of the pattern, taking care to avoid escaped | |
652 | # delimiters. the replace format and flags are optional, but |
|
672 | # delimiters. the replace format and flags are optional, but | |
653 | # delimiters are required. |
|
673 | # delimiters are required. | |
654 | match = re.match( |
|
674 | match = re.match( | |
655 | br'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$' |
|
675 | br'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$' | |
656 | % (delim, delim, delim), pattern) |
|
676 | % (delim, delim, delim), pattern) | |
657 | if not match: |
|
677 | if not match: | |
658 | repo.ui.warn(_("websub: invalid pattern for %s: %s\n") |
|
678 | repo.ui.warn(_("websub: invalid pattern for %s: %s\n") | |
659 | % (key, pattern)) |
|
679 | % (key, pattern)) | |
660 | continue |
|
680 | continue | |
661 |
|
681 | |||
662 | # we need to unescape the delimiter for regexp and format |
|
682 | # we need to unescape the delimiter for regexp and format | |
663 | delim_re = re.compile(br'(?<!\\)\\%s' % delim) |
|
683 | delim_re = re.compile(br'(?<!\\)\\%s' % delim) | |
664 | regexp = delim_re.sub(unesc, match.group(1)) |
|
684 | regexp = delim_re.sub(unesc, match.group(1)) | |
665 | format = delim_re.sub(unesc, match.group(2)) |
|
685 | format = delim_re.sub(unesc, match.group(2)) | |
666 |
|
686 | |||
667 | # the pattern allows for 6 regexp flags, so set them if necessary |
|
687 | # the pattern allows for 6 regexp flags, so set them if necessary | |
668 | flagin = match.group(3) |
|
688 | flagin = match.group(3) | |
669 | flags = 0 |
|
689 | flags = 0 | |
670 | if flagin: |
|
690 | if flagin: | |
671 | for flag in flagin.upper(): |
|
691 | for flag in flagin.upper(): | |
672 | flags |= re.__dict__[flag] |
|
692 | flags |= re.__dict__[flag] | |
673 |
|
693 | |||
674 | try: |
|
694 | try: | |
675 | regexp = re.compile(regexp, flags) |
|
695 | regexp = re.compile(regexp, flags) | |
676 | websubtable.append((regexp, format)) |
|
696 | websubtable.append((regexp, format)) | |
677 | except re.error: |
|
697 | except re.error: | |
678 | repo.ui.warn(_("websub: invalid regexp for %s: %s\n") |
|
698 | repo.ui.warn(_("websub: invalid regexp for %s: %s\n") | |
679 | % (key, regexp)) |
|
699 | % (key, regexp)) | |
680 | return websubtable |
|
700 | return websubtable |
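
getwebsubs() above parses each `[websub]` (or legacy `[interhg]`) value as an `s<delim>regexp<delim>format<delim>flags` expression and compiles it into a (regexp, format) pair that hgweb later applies to changeset descriptions. A worked example with an illustrative issue-tracker pattern; '|' is used as the delimiter so the slashes inside the replacement need no escaping, and the flag group is applied here simply as re.IGNORECASE:

    import re

    pattern = br's|issue(\d+)|<a href="/bts/issue\1">issue\1</a>|i'   # example only
    delim = re.escape(pattern[1:2])                  # the chosen delimiter, '|'
    m = re.match(br'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$'
                 % (delim, delim, delim), pattern)
    regexp, fmt = m.group(1), m.group(2)             # regexp and replacement parts
    print(re.sub(re.compile(regexp, re.IGNORECASE), fmt, b'Fixes Issue42.'))
    # b'Fixes <a href="/bts/issue42">issue42</a>.'
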
@@ -1,802 +1,802 b'' | |||||
1 | # templatekw.py - common changeset template keywords |
|
1 | # templatekw.py - common changeset template keywords | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | from .i18n import _ |
|
10 | from .i18n import _ | |
11 | from .node import ( |
|
11 | from .node import ( | |
12 | hex, |
|
12 | hex, | |
13 | nullid, |
|
13 | nullid, | |
14 | ) |
|
14 | ) | |
15 |
|
15 | |||
16 | from . import ( |
|
16 | from . import ( | |
17 | encoding, |
|
17 | encoding, | |
18 | error, |
|
18 | error, | |
19 | hbisect, |
|
19 | hbisect, | |
20 | i18n, |
|
20 | i18n, | |
21 | obsutil, |
|
21 | obsutil, | |
22 | patch, |
|
22 | patch, | |
23 | pycompat, |
|
23 | pycompat, | |
24 | registrar, |
|
24 | registrar, | |
25 | scmutil, |
|
25 | scmutil, | |
26 | templateutil, |
|
26 | templateutil, | |
27 | util, |
|
27 | util, | |
28 | ) |
|
28 | ) | |
29 |
|
29 | |||
30 | _hybrid = templateutil.hybrid |
|
30 | _hybrid = templateutil.hybrid | |
31 | _mappable = templateutil.mappable |
|
31 | _mappable = templateutil.mappable | |
32 | _showlist = templateutil._showlist |
|
32 | _showlist = templateutil._showlist | |
33 | hybriddict = templateutil.hybriddict |
|
33 | hybriddict = templateutil.hybriddict | |
34 | hybridlist = templateutil.hybridlist |
|
34 | hybridlist = templateutil.hybridlist | |
35 | compatdict = templateutil.compatdict |
|
35 | compatdict = templateutil.compatdict | |
36 | compatlist = templateutil.compatlist |
|
36 | compatlist = templateutil.compatlist | |
37 |
|
37 | |||
38 | def showdict(name, data, mapping, plural=None, key='key', value='value', |
|
38 | def showdict(name, data, mapping, plural=None, key='key', value='value', | |
39 | fmt=None, separator=' '): |
|
39 | fmt=None, separator=' '): | |
40 | ui = mapping.get('ui') |
|
40 | ui = mapping.get('ui') | |
41 | if ui: |
|
41 | if ui: | |
42 | ui.deprecwarn("templatekw.showdict() is deprecated, use " |
|
42 | ui.deprecwarn("templatekw.showdict() is deprecated, use " | |
43 | "templateutil.compatdict()", '4.6') |
|
43 | "templateutil.compatdict()", '4.6') | |
44 | c = [{key: k, value: v} for k, v in data.iteritems()] |
|
44 | c = [{key: k, value: v} for k, v in data.iteritems()] | |
45 | f = _showlist(name, c, mapping['templ'], mapping, plural, separator) |
|
45 | f = _showlist(name, c, mapping['templ'], mapping, plural, separator) | |
46 | return hybriddict(data, key=key, value=value, fmt=fmt, gen=f) |
|
46 | return hybriddict(data, key=key, value=value, fmt=fmt, gen=f) | |
47 |
|
47 | |||
48 | def showlist(name, values, mapping, plural=None, element=None, separator=' '): |
|
48 | def showlist(name, values, mapping, plural=None, element=None, separator=' '): | |
49 | ui = mapping.get('ui') |
|
49 | ui = mapping.get('ui') | |
50 | if ui: |
|
50 | if ui: | |
51 | ui.deprecwarn("templatekw.showlist() is deprecated, use " |
|
51 | ui.deprecwarn("templatekw.showlist() is deprecated, use " | |
52 | "templateutil.compatlist()", '4.6') |
|
52 | "templateutil.compatlist()", '4.6') | |
53 | if not element: |
|
53 | if not element: | |
54 | element = name |
|
54 | element = name | |
55 | f = _showlist(name, values, mapping['templ'], mapping, plural, separator) |
|
55 | f = _showlist(name, values, mapping['templ'], mapping, plural, separator) | |
56 | return hybridlist(values, name=element, gen=f) |
|
56 | return hybridlist(values, name=element, gen=f) | |
57 |
|
57 | |||
58 | def getlatesttags(context, mapping, pattern=None): |
|
58 | def getlatesttags(context, mapping, pattern=None): | |
59 | '''return date, distance and name for the latest tag of rev''' |
|
59 | '''return date, distance and name for the latest tag of rev''' | |
60 | repo = context.resource(mapping, 'repo') |
|
60 | repo = context.resource(mapping, 'repo') | |
61 | ctx = context.resource(mapping, 'ctx') |
|
61 | ctx = context.resource(mapping, 'ctx') | |
62 | cache = context.resource(mapping, 'cache') |
|
62 | cache = context.resource(mapping, 'cache') | |
63 |
|
63 | |||
64 | cachename = 'latesttags' |
|
64 | cachename = 'latesttags' | |
65 | if pattern is not None: |
|
65 | if pattern is not None: | |
66 | cachename += '-' + pattern |
|
66 | cachename += '-' + pattern | |
67 | match = util.stringmatcher(pattern)[2] |
|
67 | match = util.stringmatcher(pattern)[2] | |
68 | else: |
|
68 | else: | |
69 | match = util.always |
|
69 | match = util.always | |
70 |
|
70 | |||
71 | if cachename not in cache: |
|
71 | if cachename not in cache: | |
72 | # Cache mapping from rev to a tuple with tag date, tag |
|
72 | # Cache mapping from rev to a tuple with tag date, tag | |
73 | # distance and tag name |
|
73 | # distance and tag name | |
74 | cache[cachename] = {-1: (0, 0, ['null'])} |
|
74 | cache[cachename] = {-1: (0, 0, ['null'])} | |
75 | latesttags = cache[cachename] |
|
75 | latesttags = cache[cachename] | |
76 |
|
76 | |||
77 | rev = ctx.rev() |
|
77 | rev = ctx.rev() | |
78 | todo = [rev] |
|
78 | todo = [rev] | |
79 | while todo: |
|
79 | while todo: | |
80 | rev = todo.pop() |
|
80 | rev = todo.pop() | |
81 | if rev in latesttags: |
|
81 | if rev in latesttags: | |
82 | continue |
|
82 | continue | |
83 | ctx = repo[rev] |
|
83 | ctx = repo[rev] | |
84 | tags = [t for t in ctx.tags() |
|
84 | tags = [t for t in ctx.tags() | |
85 | if (repo.tagtype(t) and repo.tagtype(t) != 'local' |
|
85 | if (repo.tagtype(t) and repo.tagtype(t) != 'local' | |
86 | and match(t))] |
|
86 | and match(t))] | |
87 | if tags: |
|
87 | if tags: | |
88 | latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)] |
|
88 | latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)] | |
89 | continue |
|
89 | continue | |
90 | try: |
|
90 | try: | |
91 | ptags = [latesttags[p.rev()] for p in ctx.parents()] |
|
91 | ptags = [latesttags[p.rev()] for p in ctx.parents()] | |
92 | if len(ptags) > 1: |
|
92 | if len(ptags) > 1: | |
93 | if ptags[0][2] == ptags[1][2]: |
|
93 | if ptags[0][2] == ptags[1][2]: | |
94 | # The tuples are laid out so the right one can be found by |
|
94 | # The tuples are laid out so the right one can be found by | |
95 | # comparison in this case. |
|
95 | # comparison in this case. | |
96 | pdate, pdist, ptag = max(ptags) |
|
96 | pdate, pdist, ptag = max(ptags) | |
97 | else: |
|
97 | else: | |
98 | def key(x): |
|
98 | def key(x): | |
99 | changessincetag = len(repo.revs('only(%d, %s)', |
|
99 | changessincetag = len(repo.revs('only(%d, %s)', | |
100 | ctx.rev(), x[2][0])) |
|
100 | ctx.rev(), x[2][0])) | |
101 | # Smallest number of changes since tag wins. Date is |
|
101 | # Smallest number of changes since tag wins. Date is | |
102 | # used as tiebreaker. |
|
102 | # used as tiebreaker. | |
103 | return [-changessincetag, x[0]] |
|
103 | return [-changessincetag, x[0]] | |
104 | pdate, pdist, ptag = max(ptags, key=key) |
|
104 | pdate, pdist, ptag = max(ptags, key=key) | |
105 | else: |
|
105 | else: | |
106 | pdate, pdist, ptag = ptags[0] |
|
106 | pdate, pdist, ptag = ptags[0] | |
107 | except KeyError: |
|
107 | except KeyError: | |
108 | # Cache miss - recurse |
|
108 | # Cache miss - recurse | |
109 | todo.append(rev) |
|
109 | todo.append(rev) | |
110 | todo.extend(p.rev() for p in ctx.parents()) |
|
110 | todo.extend(p.rev() for p in ctx.parents()) | |
111 | continue |
|
111 | continue | |
112 | latesttags[rev] = pdate, pdist + 1, ptag |
|
112 | latesttags[rev] = pdate, pdist + 1, ptag | |
113 | return latesttags[rev] |
|
113 | return latesttags[rev] | |
114 |
|
114 | |||
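# A minimal sketch of what getlatesttags() returns, assuming `context` and
# `mapping` come from a template evaluation (values below are illustrative only):
#
#     date, distance, names = getlatesttags(context, mapping)
#     # e.g. (1514764800.0, 23, ['1.4']) for a cset 23 commits past tag 1.4,
#     # or (0, 0, ['null']) when no matching global tag exists (see the cache seed).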
115 | def getrenamedfn(repo, endrev=None): |
|
115 | def getrenamedfn(repo, endrev=None): | |
116 | rcache = {} |
|
116 | rcache = {} | |
117 | if endrev is None: |
|
117 | if endrev is None: | |
118 | endrev = len(repo) |
|
118 | endrev = len(repo) | |
119 |
|
119 | |||
120 | def getrenamed(fn, rev): |
|
120 | def getrenamed(fn, rev): | |
121 | '''looks up all renames for a file (up to endrev) the first |
|
121 | '''looks up all renames for a file (up to endrev) the first | |
122 | time the file is given. It indexes on the changerev and only |
|
122 | time the file is given. It indexes on the changerev and only | |
123 | parses the manifest if linkrev != changerev. |
|
123 | parses the manifest if linkrev != changerev. | |
124 | Returns rename info for fn at changerev rev.''' |
|
124 | Returns rename info for fn at changerev rev.''' | |
125 | if fn not in rcache: |
|
125 | if fn not in rcache: | |
126 | rcache[fn] = {} |
|
126 | rcache[fn] = {} | |
127 | fl = repo.file(fn) |
|
127 | fl = repo.file(fn) | |
128 | for i in fl: |
|
128 | for i in fl: | |
129 | lr = fl.linkrev(i) |
|
129 | lr = fl.linkrev(i) | |
130 | renamed = fl.renamed(fl.node(i)) |
|
130 | renamed = fl.renamed(fl.node(i)) | |
131 | rcache[fn][lr] = renamed |
|
131 | rcache[fn][lr] = renamed | |
132 | if lr >= endrev: |
|
132 | if lr >= endrev: | |
133 | break |
|
133 | break | |
134 | if rev in rcache[fn]: |
|
134 | if rev in rcache[fn]: | |
135 | return rcache[fn][rev] |
|
135 | return rcache[fn][rev] | |
136 |
|
136 | |||
137 | # If linkrev != rev (i.e. rev not found in rcache) fall back to |

137 | # If linkrev != rev (i.e. rev not found in rcache) fall back to | |
138 | # filectx logic. |
|
138 | # filectx logic. | |
139 | try: |
|
139 | try: | |
140 | return repo[rev][fn].renamed() |
|
140 | return repo[rev][fn].renamed() | |
141 | except error.LookupError: |
|
141 | except error.LookupError: | |
142 | return None |
|
142 | return None | |
143 |
|
143 | |||
144 | return getrenamed |
|
144 | return getrenamed | |
145 |
|
145 | |||
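# A minimal usage sketch, assuming `repo` is an open repository object and
# 'path/to/file' is a tracked file (both hypothetical here):
#
#     getrenamed = getrenamedfn(repo)
#     rename = getrenamed('path/to/file', repo['tip'].rev())
#     # falsy if the file was not copied/renamed at that revision, otherwise
#     # the same (source path, file node) info that filectx.renamed() reports.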
146 | def getlogcolumns(): |
|
146 | def getlogcolumns(): | |
147 | """Return a dict of log column labels""" |
|
147 | """Return a dict of log column labels""" | |
148 | _ = pycompat.identity # temporarily disable gettext |
|
148 | _ = pycompat.identity # temporarily disable gettext | |
149 | # i18n: column positioning for "hg log" |
|
149 | # i18n: column positioning for "hg log" | |
150 | columns = _('bookmark: %s\n' |
|
150 | columns = _('bookmark: %s\n' | |
151 | 'branch: %s\n' |
|
151 | 'branch: %s\n' | |
152 | 'changeset: %s\n' |
|
152 | 'changeset: %s\n' | |
153 | 'copies: %s\n' |
|
153 | 'copies: %s\n' | |
154 | 'date: %s\n' |
|
154 | 'date: %s\n' | |
155 | 'extra: %s=%s\n' |
|
155 | 'extra: %s=%s\n' | |
156 | 'files+: %s\n' |
|
156 | 'files+: %s\n' | |
157 | 'files-: %s\n' |
|
157 | 'files-: %s\n' | |
158 | 'files: %s\n' |
|
158 | 'files: %s\n' | |
159 | 'instability: %s\n' |
|
159 | 'instability: %s\n' | |
160 | 'manifest: %s\n' |
|
160 | 'manifest: %s\n' | |
161 | 'obsolete: %s\n' |
|
161 | 'obsolete: %s\n' | |
162 | 'parent: %s\n' |
|
162 | 'parent: %s\n' | |
163 | 'phase: %s\n' |
|
163 | 'phase: %s\n' | |
164 | 'summary: %s\n' |
|
164 | 'summary: %s\n' | |
165 | 'tag: %s\n' |
|
165 | 'tag: %s\n' | |
166 | 'user: %s\n') |
|
166 | 'user: %s\n') | |
167 | return dict(zip([s.split(':', 1)[0] for s in columns.splitlines()], |
|
167 | return dict(zip([s.split(':', 1)[0] for s in columns.splitlines()], | |
168 | i18n._(columns).splitlines(True))) |
|
168 | i18n._(columns).splitlines(True))) | |
169 |
|
169 | |||
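# A hedged sketch of the mapping getlogcolumns() yields: keys are the labels
# split off before ':' above, values the translated label lines used by hg log.
#
#     columns = getlogcolumns()
#     columns['changeset']   # -> the translated "changeset: %s\n" line
#     columns['user']        # -> the translated "user: %s\n" line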
170 | # default templates internally used for rendering of lists |
|
170 | # default templates internally used for rendering of lists | |
171 | defaulttempl = { |
|
171 | defaulttempl = { | |
172 | 'parent': '{rev}:{node|formatnode} ', |
|
172 | 'parent': '{rev}:{node|formatnode} ', | |
173 | 'manifest': '{rev}:{node|formatnode}', |
|
173 | 'manifest': '{rev}:{node|formatnode}', | |
174 | 'file_copy': '{name} ({source})', |
|
174 | 'file_copy': '{name} ({source})', | |
175 | 'envvar': '{key}={value}', |
|
175 | 'envvar': '{key}={value}', | |
176 | 'extra': '{key}={value|stringescape}' |
|
176 | 'extra': '{key}={value|stringescape}' | |
177 | } |
|
177 | } | |
178 | # filecopy is preserved for compatibility reasons |
|
178 | # filecopy is preserved for compatibility reasons | |
179 | defaulttempl['filecopy'] = defaulttempl['file_copy'] |
|
179 | defaulttempl['filecopy'] = defaulttempl['file_copy'] | |
180 |
|
180 | |||
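# A hedged illustration of how these per-item defaults render when no style
# overrides them (sample values are illustrative only):
#
#     # 'extra'      {key}={value|stringescape}  ->  branch=default
#     # 'envvar'     {key}={value}               ->  HGUSER=alice
#     # 'file_copy'  {name} ({source})           ->  b.txt (a.txt)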
181 | # keywords are callables (see registrar.templatekeyword for details) |
|
181 | # keywords are callables (see registrar.templatekeyword for details) | |
182 | keywords = {} |
|
182 | keywords = {} | |
183 | templatekeyword = registrar.templatekeyword(keywords) |
|
183 | templatekeyword = registrar.templatekeyword(keywords) | |
184 |
|
184 | |||
185 | @templatekeyword('author', requires={'ctx'}) |
|
185 | @templatekeyword('author', requires={'ctx'}) | |
186 | def showauthor(context, mapping): |
|
186 | def showauthor(context, mapping): | |
187 | """String. The unmodified author of the changeset.""" |
|
187 | """String. The unmodified author of the changeset.""" | |
188 | ctx = context.resource(mapping, 'ctx') |
|
188 | ctx = context.resource(mapping, 'ctx') | |
189 | return ctx.user() |
|
189 | return ctx.user() | |
190 |
|
190 | |||
191 | @templatekeyword('bisect', requires={'repo', 'ctx'}) |
|
191 | @templatekeyword('bisect', requires={'repo', 'ctx'}) | |
192 | def showbisect(context, mapping): |
|
192 | def showbisect(context, mapping): | |
193 | """String. The changeset bisection status.""" |
|
193 | """String. The changeset bisection status.""" | |
194 | repo = context.resource(mapping, 'repo') |
|
194 | repo = context.resource(mapping, 'repo') | |
195 | ctx = context.resource(mapping, 'ctx') |
|
195 | ctx = context.resource(mapping, 'ctx') | |
196 | return hbisect.label(repo, ctx.node()) |
|
196 | return hbisect.label(repo, ctx.node()) | |
197 |
|
197 | |||
198 | @templatekeyword('branch', requires={'ctx'}) |
|
198 | @templatekeyword('branch', requires={'ctx'}) | |
199 | def showbranch(context, mapping): |
|
199 | def showbranch(context, mapping): | |
200 | """String. The name of the branch on which the changeset was |
|
200 | """String. The name of the branch on which the changeset was | |
201 | committed. |
|
201 | committed. | |
202 | """ |
|
202 | """ | |
203 | ctx = context.resource(mapping, 'ctx') |
|
203 | ctx = context.resource(mapping, 'ctx') | |
204 | return ctx.branch() |
|
204 | return ctx.branch() | |
205 |
|
205 | |||
206 | @templatekeyword('branches', requires={'ctx', 'templ'}) |
|
206 | @templatekeyword('branches', requires={'ctx', 'templ'}) | |
207 | def showbranches(context, mapping): |
|
207 | def showbranches(context, mapping): | |
208 | """List of strings. The name of the branch on which the |
|
208 | """List of strings. The name of the branch on which the | |
209 | changeset was committed. Will be empty if the branch name was |
|
209 | changeset was committed. Will be empty if the branch name was | |
210 | default. (DEPRECATED) |
|
210 | default. (DEPRECATED) | |
211 | """ |
|
211 | """ | |
212 | ctx = context.resource(mapping, 'ctx') |
|
212 | ctx = context.resource(mapping, 'ctx') | |
213 | branch = ctx.branch() |
|
213 | branch = ctx.branch() | |
214 | if branch != 'default': |
|
214 | if branch != 'default': | |
215 | return compatlist(context, mapping, 'branch', [branch], |
|
215 | return compatlist(context, mapping, 'branch', [branch], | |
216 | plural='branches') |
|
216 | plural='branches') | |
217 | return compatlist(context, mapping, 'branch', [], plural='branches') |
|
217 | return compatlist(context, mapping, 'branch', [], plural='branches') | |
218 |
|
218 | |||
219 | @templatekeyword('bookmarks', requires={'repo', 'ctx', 'templ'}) |
|
219 | @templatekeyword('bookmarks', requires={'repo', 'ctx', 'templ'}) | |
220 | def showbookmarks(context, mapping): |
|
220 | def showbookmarks(context, mapping): | |
221 | """List of strings. Any bookmarks associated with the |
|
221 | """List of strings. Any bookmarks associated with the | |
222 | changeset. Also sets 'active', the name of the active bookmark. |
|
222 | changeset. Also sets 'active', the name of the active bookmark. | |
223 | """ |
|
223 | """ | |
224 | repo = context.resource(mapping, 'repo') |
|
224 | repo = context.resource(mapping, 'repo') | |
225 | ctx = context.resource(mapping, 'ctx') |
|
225 | ctx = context.resource(mapping, 'ctx') | |
226 | templ = context.resource(mapping, 'templ') |
|
226 | templ = context.resource(mapping, 'templ') | |
227 | bookmarks = ctx.bookmarks() |
|
227 | bookmarks = ctx.bookmarks() | |
228 | active = repo._activebookmark |
|
228 | active = repo._activebookmark | |
229 | makemap = lambda v: {'bookmark': v, 'active': active, 'current': active} |
|
229 | makemap = lambda v: {'bookmark': v, 'active': active, 'current': active} | |
230 | f = _showlist('bookmark', bookmarks, templ, mapping) |
|
230 | f = _showlist('bookmark', bookmarks, templ, mapping) | |
231 | return _hybrid(f, bookmarks, makemap, pycompat.identity) |
|
231 | return _hybrid(f, bookmarks, makemap, pycompat.identity) | |
232 |
|
232 | |||
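# A hedged command-line sketch; each list item also exposes the 'bookmark' and
# 'active' sub-keywords set up in makemap above:
#
#     hg log -r . -T '{join(bookmarks, ", ")}\n'
#     hg log -r . -T "{bookmarks % '{bookmark} '}"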
233 | @templatekeyword('children', requires={'ctx', 'templ'}) |
|
233 | @templatekeyword('children', requires={'ctx', 'templ'}) | |
234 | def showchildren(context, mapping): |
|
234 | def showchildren(context, mapping): | |
235 | """List of strings. The children of the changeset.""" |
|
235 | """List of strings. The children of the changeset.""" | |
236 | ctx = context.resource(mapping, 'ctx') |
|
236 | ctx = context.resource(mapping, 'ctx') | |
237 | childrevs = ['%d:%s' % (cctx.rev(), cctx) for cctx in ctx.children()] |
|
237 | childrevs = ['%d:%s' % (cctx.rev(), cctx) for cctx in ctx.children()] | |
238 | return compatlist(context, mapping, 'children', childrevs, element='child') |
|
238 | return compatlist(context, mapping, 'children', childrevs, element='child') | |
239 |
|
239 | |||
240 | # Deprecated, but kept alive for help generation purposes. |

240 | # Deprecated, but kept alive for help generation purposes. | |
241 | @templatekeyword('currentbookmark', requires={'repo', 'ctx'}) |
|
241 | @templatekeyword('currentbookmark', requires={'repo', 'ctx'}) | |
242 | def showcurrentbookmark(context, mapping): |
|
242 | def showcurrentbookmark(context, mapping): | |
243 | """String. The active bookmark, if it is associated with the changeset. |
|
243 | """String. The active bookmark, if it is associated with the changeset. | |
244 | (DEPRECATED)""" |
|
244 | (DEPRECATED)""" | |
245 | return showactivebookmark(context, mapping) |
|
245 | return showactivebookmark(context, mapping) | |
246 |
|
246 | |||
247 | @templatekeyword('activebookmark', requires={'repo', 'ctx'}) |
|
247 | @templatekeyword('activebookmark', requires={'repo', 'ctx'}) | |
248 | def showactivebookmark(context, mapping): |
|
248 | def showactivebookmark(context, mapping): | |
249 | """String. The active bookmark, if it is associated with the changeset.""" |
|
249 | """String. The active bookmark, if it is associated with the changeset.""" | |
250 | repo = context.resource(mapping, 'repo') |
|
250 | repo = context.resource(mapping, 'repo') | |
251 | ctx = context.resource(mapping, 'ctx') |
|
251 | ctx = context.resource(mapping, 'ctx') | |
252 | active = repo._activebookmark |
|
252 | active = repo._activebookmark | |
253 | if active and active in ctx.bookmarks(): |
|
253 | if active and active in ctx.bookmarks(): | |
254 | return active |
|
254 | return active | |
255 | return '' |
|
255 | return '' | |
256 |
|
256 | |||
257 | @templatekeyword('date', requires={'ctx'}) |
|
257 | @templatekeyword('date', requires={'ctx'}) | |
258 | def showdate(context, mapping): |
|
258 | def showdate(context, mapping): | |
259 | """Date information. The date when the changeset was committed.""" |
|
259 | """Date information. The date when the changeset was committed.""" | |
260 | ctx = context.resource(mapping, 'ctx') |
|
260 | ctx = context.resource(mapping, 'ctx') | |
261 | return ctx.date() |
|
261 | return ctx.date() | |
262 |
|
262 | |||
263 | @templatekeyword('desc', requires={'ctx'}) |
|
263 | @templatekeyword('desc', requires={'ctx'}) | |
264 | def showdescription(context, mapping): |
|
264 | def showdescription(context, mapping): | |
265 | """String. The text of the changeset description.""" |
|
265 | """String. The text of the changeset description.""" | |
266 | ctx = context.resource(mapping, 'ctx') |
|
266 | ctx = context.resource(mapping, 'ctx') | |
267 | s = ctx.description() |
|
267 | s = ctx.description() | |
268 | if isinstance(s, encoding.localstr): |
|
268 | if isinstance(s, encoding.localstr): | |
269 | # try hard to preserve utf-8 bytes |
|
269 | # try hard to preserve utf-8 bytes | |
270 | return encoding.tolocal(encoding.fromlocal(s).strip()) |
|
270 | return encoding.tolocal(encoding.fromlocal(s).strip()) | |
271 | else: |
|
271 | else: | |
272 | return s.strip() |
|
272 | return s.strip() | |
273 |
|
273 | |||
274 | @templatekeyword('diffstat', requires={'ctx'}) |
|
274 | @templatekeyword('diffstat', requires={'ctx'}) | |
275 | def showdiffstat(context, mapping): |
|
275 | def showdiffstat(context, mapping): | |
276 | """String. Statistics of changes with the following format: |
|
276 | """String. Statistics of changes with the following format: | |
277 | "modified files: +added/-removed lines" |
|
277 | "modified files: +added/-removed lines" | |
278 | """ |
|
278 | """ | |
279 | ctx = context.resource(mapping, 'ctx') |
|
279 | ctx = context.resource(mapping, 'ctx') | |
280 | stats = patch.diffstatdata(util.iterlines(ctx.diff(noprefix=False))) |
|
280 | stats = patch.diffstatdata(util.iterlines(ctx.diff(noprefix=False))) | |
281 | maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats) |
|
281 | maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats) | |
282 | return '%d: +%d/-%d' % (len(stats), adds, removes) |
|
282 | return '%d: +%d/-%d' % (len(stats), adds, removes) | |
283 |
|
283 | |||
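# A hedged usage sketch; per the '%d: +%d/-%d' format above, a changeset
# touching three files might print "3: +12/-4" (numbers illustrative):
#
#     hg log -r . -T '{diffstat}\n'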
284 | @templatekeyword('envvars', requires={'ui', 'templ'}) |
|
284 | @templatekeyword('envvars', requires={'ui', 'templ'}) | |
285 | def showenvvars(context, mapping): |
|
285 | def showenvvars(context, mapping): | |
286 | """A dictionary of environment variables. (EXPERIMENTAL)""" |
|
286 | """A dictionary of environment variables. (EXPERIMENTAL)""" | |
287 | ui = context.resource(mapping, 'ui') |
|
287 | ui = context.resource(mapping, 'ui') | |
288 | env = ui.exportableenviron() |
|
288 | env = ui.exportableenviron() | |
289 | env = util.sortdict((k, env[k]) for k in sorted(env)) |
|
289 | env = util.sortdict((k, env[k]) for k in sorted(env)) | |
290 | return compatdict(context, mapping, 'envvar', env, plural='envvars') |
|
290 | return compatdict(context, mapping, 'envvar', env, plural='envvars') | |
291 |
|
291 | |||
292 | @templatekeyword('extras', requires={'ctx', 'templ'}) |
|
292 | @templatekeyword('extras', requires={'ctx', 'templ'}) | |
293 | def showextras(context, mapping): |
|
293 | def showextras(context, mapping): | |
294 | """List of dicts with key, value entries of the 'extras' |
|
294 | """List of dicts with key, value entries of the 'extras' | |
295 | field of this changeset.""" |
|
295 | field of this changeset.""" | |
296 | ctx = context.resource(mapping, 'ctx') |
|
296 | ctx = context.resource(mapping, 'ctx') | |
297 | templ = context.resource(mapping, 'templ') |
|
297 | templ = context.resource(mapping, 'templ') | |
298 | extras = ctx.extra() |
|
298 | extras = ctx.extra() | |
299 | extras = util.sortdict((k, extras[k]) for k in sorted(extras)) |
|
299 | extras = util.sortdict((k, extras[k]) for k in sorted(extras)) | |
300 | makemap = lambda k: {'key': k, 'value': extras[k]} |
|
300 | makemap = lambda k: {'key': k, 'value': extras[k]} | |
301 | c = [makemap(k) for k in extras] |
|
301 | c = [makemap(k) for k in extras] | |
302 | f = _showlist('extra', c, templ, mapping, plural='extras') |
|
302 | f = _showlist('extra', c, templ, mapping, plural='extras') | |
303 | return _hybrid(f, extras, makemap, |
|
303 | return _hybrid(f, extras, makemap, | |
304 | lambda k: '%s=%s' % (k, util.escapestr(extras[k]))) |
|
304 | lambda k: '%s=%s' % (k, util.escapestr(extras[k]))) | |
305 |
|
305 | |||
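# A hedged sketch of iterating the extras from a template; the per-item
# 'key'/'value' sub-keywords come from makemap above:
#
#     hg log -r . -T "{extras % '{key}={value}\n'}"
#     # typically prints at least:  branch=default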
306 | def _showfilesbystat(context, mapping, name, index): |
|
306 | def _showfilesbystat(context, mapping, name, index): | |
307 | repo = context.resource(mapping, 'repo') |
|
307 | repo = context.resource(mapping, 'repo') | |
308 | ctx = context.resource(mapping, 'ctx') |
|
308 | ctx = context.resource(mapping, 'ctx') | |
309 | revcache = context.resource(mapping, 'revcache') |
|
309 | revcache = context.resource(mapping, 'revcache') | |
310 | if 'files' not in revcache: |
|
310 | if 'files' not in revcache: | |
311 | revcache['files'] = repo.status(ctx.p1(), ctx)[:3] |
|
311 | revcache['files'] = repo.status(ctx.p1(), ctx)[:3] | |
312 | files = revcache['files'][index] |
|
312 | files = revcache['files'][index] | |
313 | return compatlist(context, mapping, name, files, element='file') |
|
313 | return compatlist(context, mapping, name, files, element='file') | |
314 |
|
314 | |||
315 | @templatekeyword('file_adds', requires={'repo', 'ctx', 'revcache', 'templ'}) |
|
315 | @templatekeyword('file_adds', requires={'repo', 'ctx', 'revcache', 'templ'}) | |
316 | def showfileadds(context, mapping): |
|
316 | def showfileadds(context, mapping): | |
317 | """List of strings. Files added by this changeset.""" |
|
317 | """List of strings. Files added by this changeset.""" | |
318 | return _showfilesbystat(context, mapping, 'file_add', 1) |
|
318 | return _showfilesbystat(context, mapping, 'file_add', 1) | |
319 |
|
319 | |||
320 | @templatekeyword('file_copies', |
|
320 | @templatekeyword('file_copies', | |
321 | requires={'repo', 'ctx', 'cache', 'revcache', 'templ'}) |
|
321 | requires={'repo', 'ctx', 'cache', 'revcache', 'templ'}) | |
322 | def showfilecopies(context, mapping): |
|
322 | def showfilecopies(context, mapping): | |
323 | """List of strings. Files copied in this changeset with |
|
323 | """List of strings. Files copied in this changeset with | |
324 | their sources. |
|
324 | their sources. | |
325 | """ |
|
325 | """ | |
326 | repo = context.resource(mapping, 'repo') |
|
326 | repo = context.resource(mapping, 'repo') | |
327 | ctx = context.resource(mapping, 'ctx') |
|
327 | ctx = context.resource(mapping, 'ctx') | |
328 | cache = context.resource(mapping, 'cache') |
|
328 | cache = context.resource(mapping, 'cache') | |
329 | copies = context.resource(mapping, 'revcache').get('copies') |
|
329 | copies = context.resource(mapping, 'revcache').get('copies') | |
330 | if copies is None: |
|
330 | if copies is None: | |
331 | if 'getrenamed' not in cache: |
|
331 | if 'getrenamed' not in cache: | |
332 | cache['getrenamed'] = getrenamedfn(repo) |
|
332 | cache['getrenamed'] = getrenamedfn(repo) | |
333 | copies = [] |
|
333 | copies = [] | |
334 | getrenamed = cache['getrenamed'] |
|
334 | getrenamed = cache['getrenamed'] | |
335 | for fn in ctx.files(): |
|
335 | for fn in ctx.files(): | |
336 | rename = getrenamed(fn, ctx.rev()) |
|
336 | rename = getrenamed(fn, ctx.rev()) | |
337 | if rename: |
|
337 | if rename: | |
338 | copies.append((fn, rename[0])) |
|
338 | copies.append((fn, rename[0])) | |
339 |
|
339 | |||
340 | copies = util.sortdict(copies) |
|
340 | copies = util.sortdict(copies) | |
341 | return compatdict(context, mapping, 'file_copy', copies, |
|
341 | return compatdict(context, mapping, 'file_copy', copies, | |
342 | key='name', value='source', fmt='%s (%s)', |
|
342 | key='name', value='source', fmt='%s (%s)', | |
343 | plural='file_copies') |
|
343 | plural='file_copies') | |
344 |
|
344 | |||
345 | # showfilecopiesswitch() displays file copies only if copy records are |
|
345 | # showfilecopiesswitch() displays file copies only if copy records are | |
346 | # provided before calling the templater, usually with a --copies |
|
346 | # provided before calling the templater, usually with a --copies | |
347 | # command line switch. |
|
347 | # command line switch. | |
348 | @templatekeyword('file_copies_switch', requires={'revcache', 'templ'}) |
|
348 | @templatekeyword('file_copies_switch', requires={'revcache', 'templ'}) | |
349 | def showfilecopiesswitch(context, mapping): |
|
349 | def showfilecopiesswitch(context, mapping): | |
350 | """List of strings. Like "file_copies" but displayed |
|
350 | """List of strings. Like "file_copies" but displayed | |
351 | only if the --copies switch is set. |

351 | only if the --copies switch is set. | |
352 | """ |
|
352 | """ | |
353 | copies = context.resource(mapping, 'revcache').get('copies') or [] |
|
353 | copies = context.resource(mapping, 'revcache').get('copies') or [] | |
354 | copies = util.sortdict(copies) |
|
354 | copies = util.sortdict(copies) | |
355 | return compatdict(context, mapping, 'file_copy', copies, |
|
355 | return compatdict(context, mapping, 'file_copy', copies, | |
356 | key='name', value='source', fmt='%s (%s)', |
|
356 | key='name', value='source', fmt='%s (%s)', | |
357 | plural='file_copies') |
|
357 | plural='file_copies') | |
358 |
|
358 | |||
359 | @templatekeyword('file_dels', requires={'repo', 'ctx', 'revcache', 'templ'}) |
|
359 | @templatekeyword('file_dels', requires={'repo', 'ctx', 'revcache', 'templ'}) | |
360 | def showfiledels(context, mapping): |
|
360 | def showfiledels(context, mapping): | |
361 | """List of strings. Files removed by this changeset.""" |
|
361 | """List of strings. Files removed by this changeset.""" | |
362 | return _showfilesbystat(context, mapping, 'file_del', 2) |
|
362 | return _showfilesbystat(context, mapping, 'file_del', 2) | |
363 |
|
363 | |||
364 | @templatekeyword('file_mods', requires={'repo', 'ctx', 'revcache', 'templ'}) |
|
364 | @templatekeyword('file_mods', requires={'repo', 'ctx', 'revcache', 'templ'}) | |
365 | def showfilemods(context, mapping): |
|
365 | def showfilemods(context, mapping): | |
366 | """List of strings. Files modified by this changeset.""" |
|
366 | """List of strings. Files modified by this changeset.""" | |
367 | return _showfilesbystat(context, mapping, 'file_mod', 0) |
|
367 | return _showfilesbystat(context, mapping, 'file_mod', 0) | |
368 |
|
368 | |||
369 | @templatekeyword('files', requires={'ctx', 'templ'}) |
|
369 | @templatekeyword('files', requires={'ctx', 'templ'}) | |
370 | def showfiles(context, mapping): |
|
370 | def showfiles(context, mapping): | |
371 | """List of strings. All files modified, added, or removed by this |
|
371 | """List of strings. All files modified, added, or removed by this | |
372 | changeset. |
|
372 | changeset. | |
373 | """ |
|
373 | """ | |
374 | ctx = context.resource(mapping, 'ctx') |
|
374 | ctx = context.resource(mapping, 'ctx') | |
375 | return compatlist(context, mapping, 'file', ctx.files()) |
|
375 | return compatlist(context, mapping, 'file', ctx.files()) | |
376 |
|
376 | |||
377 | @templatekeyword('graphnode', requires={'repo', 'ctx'}) |
|
377 | @templatekeyword('graphnode', requires={'repo', 'ctx'}) | |
378 | def showgraphnode(context, mapping): |
|
378 | def showgraphnode(context, mapping): | |
379 | """String. The character representing the changeset node in an ASCII |
|
379 | """String. The character representing the changeset node in an ASCII | |
380 | revision graph.""" |
|
380 | revision graph.""" | |
381 | repo = context.resource(mapping, 'repo') |
|
381 | repo = context.resource(mapping, 'repo') | |
382 | ctx = context.resource(mapping, 'ctx') |
|
382 | ctx = context.resource(mapping, 'ctx') | |
383 | return getgraphnode(repo, ctx) |
|
383 | return getgraphnode(repo, ctx) | |
384 |
|
384 | |||
385 | def getgraphnode(repo, ctx): |
|
385 | def getgraphnode(repo, ctx): | |
386 | wpnodes = repo.dirstate.parents() |
|
386 | wpnodes = repo.dirstate.parents() | |
387 | if wpnodes[1] == nullid: |
|
387 | if wpnodes[1] == nullid: | |
388 | wpnodes = wpnodes[:1] |
|
388 | wpnodes = wpnodes[:1] | |
389 | if ctx.node() in wpnodes: |
|
389 | if ctx.node() in wpnodes: | |
390 | return '@' |
|
390 | return '@' | |
391 | elif ctx.obsolete(): |
|
391 | elif ctx.obsolete(): | |
392 | return 'x' |
|
392 | return 'x' | |
393 | elif ctx.isunstable(): |
|
393 | elif ctx.isunstable(): | |
394 | return '*' |
|
394 | return '*' | |
395 | elif ctx.closesbranch(): |
|
395 | elif ctx.closesbranch(): | |
396 | return '_' |
|
396 | return '_' | |
397 | else: |
|
397 | else: | |
398 | return 'o' |
|
398 | return 'o' | |
399 |
|
399 | |||
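# A hedged summary of the character mapping implemented above, and one way to
# surface it from a template (output depends on the repository):
#
#     # '@' working-directory parent, 'x' obsolete, '*' unstable,
#     # '_' closes a branch, 'o' anything else
#     hg log -r . -T '{graphnode} {rev}\n'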
400 | @templatekeyword('graphwidth', requires=()) |
|
400 | @templatekeyword('graphwidth', requires=()) | |
401 | def showgraphwidth(context, mapping): |
|
401 | def showgraphwidth(context, mapping): | |
402 | """Integer. The width of the graph drawn by 'log --graph' or zero.""" |
|
402 | """Integer. The width of the graph drawn by 'log --graph' or zero.""" | |
403 | # just hosts documentation; should be overridden by template mapping |
|
403 | # just hosts documentation; should be overridden by template mapping | |
404 | return 0 |
|
404 | return 0 | |
405 |
|
405 | |||
406 | @templatekeyword('index', requires=()) |
|
406 | @templatekeyword('index', requires=()) | |
407 | def showindex(context, mapping): |
|
407 | def showindex(context, mapping): | |
408 | """Integer. The current iteration of the loop. (0 indexed)""" |
|
408 | """Integer. The current iteration of the loop. (0 indexed)""" | |
409 | # just hosts documentation; should be overridden by template mapping |
|
409 | # just hosts documentation; should be overridden by template mapping | |
410 | raise error.Abort(_("can't use index in this context")) |
|
410 | raise error.Abort(_("can't use index in this context")) | |
411 |
|
411 | |||
412 | @templatekeyword('latesttag', requires={'repo', 'ctx', 'cache', 'templ'}) |
|
412 | @templatekeyword('latesttag', requires={'repo', 'ctx', 'cache', 'templ'}) | |
413 | def showlatesttag(context, mapping): |
|
413 | def showlatesttag(context, mapping): | |
414 | """List of strings. The global tags on the most recent globally |
|
414 | """List of strings. The global tags on the most recent globally | |
415 | tagged ancestor of this changeset. If no such tags exist, the list |
|
415 | tagged ancestor of this changeset. If no such tags exist, the list | |
416 | consists of the single string "null". |
|
416 | consists of the single string "null". | |
417 | """ |
|
417 | """ | |
418 | return showlatesttags(context, mapping, None) |
|
418 | return showlatesttags(context, mapping, None) | |
419 |
|
419 | |||
420 | def showlatesttags(context, mapping, pattern): |
|
420 | def showlatesttags(context, mapping, pattern): | |
421 | """helper method for the latesttag keyword and function""" |
|
421 | """helper method for the latesttag keyword and function""" | |
422 | latesttags = getlatesttags(context, mapping, pattern) |
|
422 | latesttags = getlatesttags(context, mapping, pattern) | |
423 |
|
423 | |||
424 | # latesttag[0] is an implementation detail for sorting csets on different |
|
424 | # latesttag[0] is an implementation detail for sorting csets on different | |
425 | # branches in a stable manner; it is the date the tagged cset was created, |

425 | # branches in a stable manner; it is the date the tagged cset was created, | |
426 | # not the date the tag was created. Therefore it isn't made visible here. |
|
426 | # not the date the tag was created. Therefore it isn't made visible here. | |
427 | makemap = lambda v: { |
|
427 | makemap = lambda v: { | |
428 | 'changes': _showchangessincetag, |
|
428 | 'changes': _showchangessincetag, | |
429 | 'distance': latesttags[1], |
|
429 | 'distance': latesttags[1], | |
430 | 'latesttag': v, # BC with {latesttag % '{latesttag}'} |
|
430 | 'latesttag': v, # BC with {latesttag % '{latesttag}'} | |
431 | 'tag': v |
|
431 | 'tag': v | |
432 | } |
|
432 | } | |
433 |
|
433 | |||
434 | tags = latesttags[2] |
|
434 | tags = latesttags[2] | |
435 | templ = context.resource(mapping, 'templ') |
|
435 | templ = context.resource(mapping, 'templ') | |
436 | f = _showlist('latesttag', tags, templ, mapping, separator=':') |
|
436 | f = _showlist('latesttag', tags, templ, mapping, separator=':') | |
437 | return _hybrid(f, tags, makemap, pycompat.identity) |
|
437 | return _hybrid(f, tags, makemap, pycompat.identity) | |
438 |
|
438 | |||
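# A hedged template sketch using the per-item sub-keywords from makemap above
# ('tag', 'distance', 'changes'); the tag name and counts are illustrative:
#
#     hg log -r . -T "{latesttag % '{tag}: distance {distance}, {changes} changes\n'}"
#     # e.g.  1.4: distance 7, 9 changes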
439 | @templatekeyword('latesttagdistance', requires={'repo', 'ctx', 'cache'}) |
|
439 | @templatekeyword('latesttagdistance', requires={'repo', 'ctx', 'cache'}) | |
440 | def showlatesttagdistance(context, mapping): |
|
440 | def showlatesttagdistance(context, mapping): | |
441 | """Integer. Longest path to the latest tag.""" |
|
441 | """Integer. Longest path to the latest tag.""" | |
442 | return getlatesttags(context, mapping)[1] |
|
442 | return getlatesttags(context, mapping)[1] | |
443 |
|
443 | |||
444 | @templatekeyword('changessincelatesttag', requires={'repo', 'ctx', 'cache'}) |
|
444 | @templatekeyword('changessincelatesttag', requires={'repo', 'ctx', 'cache'}) | |
445 | def showchangessincelatesttag(context, mapping): |
|
445 | def showchangessincelatesttag(context, mapping): | |
446 | """Integer. All ancestors not in the latest tag.""" |
|
446 | """Integer. All ancestors not in the latest tag.""" | |
447 | mapping = mapping.copy() |
|
447 | mapping = mapping.copy() | |
448 | mapping['tag'] = getlatesttags(context, mapping)[2][0] |
|
448 | mapping['tag'] = getlatesttags(context, mapping)[2][0] | |
449 | return _showchangessincetag(context, mapping) |
|
449 | return _showchangessincetag(context, mapping) | |
450 |
|
450 | |||
451 | def _showchangessincetag(context, mapping): |
|
451 | def _showchangessincetag(context, mapping): | |
452 | repo = context.resource(mapping, 'repo') |
|
452 | repo = context.resource(mapping, 'repo') | |
453 | ctx = context.resource(mapping, 'ctx') |
|
453 | ctx = context.resource(mapping, 'ctx') | |
454 | offset = 0 |
|
454 | offset = 0 | |
455 | revs = [ctx.rev()] |
|
455 | revs = [ctx.rev()] | |
456 | tag = context.symbol(mapping, 'tag') |
|
456 | tag = context.symbol(mapping, 'tag') | |
457 |
|
457 | |||
458 | # The only() revset doesn't currently support wdir() |
|
458 | # The only() revset doesn't currently support wdir() | |
459 | if ctx.rev() is None: |
|
459 | if ctx.rev() is None: | |
460 | offset = 1 |
|
460 | offset = 1 | |
461 | revs = [p.rev() for p in ctx.parents()] |
|
461 | revs = [p.rev() for p in ctx.parents()] | |
462 |
|
462 | |||
463 | return len(repo.revs('only(%ld, %s)', revs, tag)) + offset |
|
463 | return len(repo.revs('only(%ld, %s)', revs, tag)) + offset | |
464 |
|
464 | |||
465 | # tell the templater that latesttags.changes has switched to the (context, mapping) API |

465 | # tell the templater that latesttags.changes has switched to the (context, mapping) API | |
466 | _showchangessincetag._requires = {'repo', 'ctx'} |
|
466 | _showchangessincetag._requires = {'repo', 'ctx'} | |
467 |
|
467 | |||
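# A hedged command-line sketch of the keywords built on this helper; the +1
# offset above compensates for only() not supporting wdir():
#
#     hg log -r . -T '{latesttag}+{changessincelatesttag}\n'
#     # e.g.  1.4+9   (tag name and count illustrative)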
468 | @templatekeyword('manifest', requires={'repo', 'ctx', 'templ'}) |
|
468 | @templatekeyword('manifest', requires={'repo', 'ctx', 'templ'}) | |
469 | def showmanifest(context, mapping): |
|
469 | def showmanifest(context, mapping): | |
470 | repo = context.resource(mapping, 'repo') |
|
470 | repo = context.resource(mapping, 'repo') | |
471 | ctx = context.resource(mapping, 'ctx') |
|
471 | ctx = context.resource(mapping, 'ctx') | |
472 | templ = context.resource(mapping, 'templ') |
|
472 | templ = context.resource(mapping, 'templ') | |
473 | mnode = ctx.manifestnode() |
|
473 | mnode = ctx.manifestnode() | |
474 | if mnode is None: |
|
474 | if mnode is None: | |
475 | # just avoid a crash; we might want to use the 'ff...' hash in the future |

475 | # just avoid a crash; we might want to use the 'ff...' hash in the future | |
476 | return |
|
476 | return | |
477 | mrev = repo.manifestlog._revlog.rev(mnode) |
|
477 | mrev = repo.manifestlog._revlog.rev(mnode) | |
478 | mhex = hex(mnode) |
|
478 | mhex = hex(mnode) | |
479 | mapping = mapping.copy() |
|
479 | mapping = mapping.copy() | |
480 | mapping.update({'rev': mrev, 'node': mhex}) |
|
480 | mapping.update({'rev': mrev, 'node': mhex}) | |
481 |
f = templ('manifest', |
|
481 | f = templ.generate('manifest', mapping) | |
482 | # TODO: perhaps 'ctx' should be dropped from mapping because manifest |
|
482 | # TODO: perhaps 'ctx' should be dropped from mapping because manifest | |
483 | # rev and node are completely different from changeset's. |
|
483 | # rev and node are completely different from changeset's. | |
484 | return _mappable(f, None, f, lambda x: {'rev': mrev, 'node': mhex}) |
|
484 | return _mappable(f, None, f, lambda x: {'rev': mrev, 'node': mhex}) | |
485 |
|
485 | |||
486 | @templatekeyword('obsfate', requires={'ui', 'repo', 'ctx', 'templ'}) |
|
486 | @templatekeyword('obsfate', requires={'ui', 'repo', 'ctx', 'templ'}) | |
487 | def showobsfate(context, mapping): |
|
487 | def showobsfate(context, mapping): | |
488 | # this function returns a list containing pre-formatted obsfate strings. |
|
488 | # this function returns a list containing pre-formatted obsfate strings. | |
489 | # |
|
489 | # | |
490 | # This function will be replaced by template fragments once we have |

490 | # This function will be replaced by template fragments once we have | |
491 | # the verbosity templatekw available. |

491 | # the verbosity templatekw available. | |
492 | succsandmarkers = showsuccsandmarkers(context, mapping) |
|
492 | succsandmarkers = showsuccsandmarkers(context, mapping) | |
493 |
|
493 | |||
494 | ui = context.resource(mapping, 'ui') |
|
494 | ui = context.resource(mapping, 'ui') | |
495 | values = [] |
|
495 | values = [] | |
496 |
|
496 | |||
497 | for x in succsandmarkers: |
|
497 | for x in succsandmarkers: | |
498 | values.append(obsutil.obsfateprinter(x['successors'], x['markers'], ui)) |
|
498 | values.append(obsutil.obsfateprinter(x['successors'], x['markers'], ui)) | |
499 |
|
499 | |||
500 | return compatlist(context, mapping, "fate", values) |
|
500 | return compatlist(context, mapping, "fate", values) | |
501 |
|
501 | |||
502 | def shownames(context, mapping, namespace): |
|
502 | def shownames(context, mapping, namespace): | |
503 | """helper method to generate a template keyword for a namespace""" |
|
503 | """helper method to generate a template keyword for a namespace""" | |
504 | repo = context.resource(mapping, 'repo') |
|
504 | repo = context.resource(mapping, 'repo') | |
505 | ctx = context.resource(mapping, 'ctx') |
|
505 | ctx = context.resource(mapping, 'ctx') | |
506 | ns = repo.names[namespace] |
|
506 | ns = repo.names[namespace] | |
507 | names = ns.names(repo, ctx.node()) |
|
507 | names = ns.names(repo, ctx.node()) | |
508 | return compatlist(context, mapping, ns.templatename, names, |
|
508 | return compatlist(context, mapping, ns.templatename, names, | |
509 | plural=namespace) |
|
509 | plural=namespace) | |
510 |
|
510 | |||
511 | @templatekeyword('namespaces', requires={'repo', 'ctx', 'templ'}) |
|
511 | @templatekeyword('namespaces', requires={'repo', 'ctx', 'templ'}) | |
512 | def shownamespaces(context, mapping): |
|
512 | def shownamespaces(context, mapping): | |
513 | """Dict of lists. Names attached to this changeset per |
|
513 | """Dict of lists. Names attached to this changeset per | |
514 | namespace.""" |
|
514 | namespace.""" | |
515 | repo = context.resource(mapping, 'repo') |
|
515 | repo = context.resource(mapping, 'repo') | |
516 | ctx = context.resource(mapping, 'ctx') |
|
516 | ctx = context.resource(mapping, 'ctx') | |
517 | templ = context.resource(mapping, 'templ') |
|
517 | templ = context.resource(mapping, 'templ') | |
518 |
|
518 | |||
519 | namespaces = util.sortdict() |
|
519 | namespaces = util.sortdict() | |
520 | def makensmapfn(ns): |
|
520 | def makensmapfn(ns): | |
521 | # 'name' for iterating over namespaces, templatename for local reference |
|
521 | # 'name' for iterating over namespaces, templatename for local reference | |
522 | return lambda v: {'name': v, ns.templatename: v} |
|
522 | return lambda v: {'name': v, ns.templatename: v} | |
523 |
|
523 | |||
524 | for k, ns in repo.names.iteritems(): |
|
524 | for k, ns in repo.names.iteritems(): | |
525 | names = ns.names(repo, ctx.node()) |
|
525 | names = ns.names(repo, ctx.node()) | |
526 | f = _showlist('name', names, templ, mapping) |
|
526 | f = _showlist('name', names, templ, mapping) | |
527 | namespaces[k] = _hybrid(f, names, makensmapfn(ns), pycompat.identity) |
|
527 | namespaces[k] = _hybrid(f, names, makensmapfn(ns), pycompat.identity) | |
528 |
|
528 | |||
529 | f = _showlist('namespace', list(namespaces), templ, mapping) |
|
529 | f = _showlist('namespace', list(namespaces), templ, mapping) | |
530 |
|
530 | |||
531 | def makemap(ns): |
|
531 | def makemap(ns): | |
532 | return { |
|
532 | return { | |
533 | 'namespace': ns, |
|
533 | 'namespace': ns, | |
534 | 'names': namespaces[ns], |
|
534 | 'names': namespaces[ns], | |
535 | 'builtin': repo.names[ns].builtin, |
|
535 | 'builtin': repo.names[ns].builtin, | |
536 | 'colorname': repo.names[ns].colorname, |
|
536 | 'colorname': repo.names[ns].colorname, | |
537 | } |
|
537 | } | |
538 |
|
538 | |||
539 | return _hybrid(f, namespaces, makemap, pycompat.identity) |
|
539 | return _hybrid(f, namespaces, makemap, pycompat.identity) | |
540 |
|
540 | |||
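# A hedged template sketch; each item exposes 'namespace' and 'names' (plus
# 'builtin' and 'colorname') per makemap above:
#
#     hg log -r . -T "{namespaces % '{namespace}: {names}\n'}"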
541 | @templatekeyword('node', requires={'ctx'}) |
|
541 | @templatekeyword('node', requires={'ctx'}) | |
542 | def shownode(context, mapping): |
|
542 | def shownode(context, mapping): | |
543 | """String. The changeset identification hash, as a 40 hexadecimal |
|
543 | """String. The changeset identification hash, as a 40 hexadecimal | |
544 | digit string. |
|
544 | digit string. | |
545 | """ |
|
545 | """ | |
546 | ctx = context.resource(mapping, 'ctx') |
|
546 | ctx = context.resource(mapping, 'ctx') | |
547 | return ctx.hex() |
|
547 | return ctx.hex() | |
548 |
|
548 | |||
549 | @templatekeyword('obsolete', requires={'ctx'}) |
|
549 | @templatekeyword('obsolete', requires={'ctx'}) | |
550 | def showobsolete(context, mapping): |
|
550 | def showobsolete(context, mapping): | |
551 | """String. Whether the changeset is obsolete. (EXPERIMENTAL)""" |
|
551 | """String. Whether the changeset is obsolete. (EXPERIMENTAL)""" | |
552 | ctx = context.resource(mapping, 'ctx') |
|
552 | ctx = context.resource(mapping, 'ctx') | |
553 | if ctx.obsolete(): |
|
553 | if ctx.obsolete(): | |
554 | return 'obsolete' |
|
554 | return 'obsolete' | |
555 | return '' |
|
555 | return '' | |
556 |
|
556 | |||
557 | @templatekeyword('peerurls', requires={'repo'}) |
|
557 | @templatekeyword('peerurls', requires={'repo'}) | |
558 | def showpeerurls(context, mapping): |
|
558 | def showpeerurls(context, mapping): | |
559 | """A dictionary of repository locations defined in the [paths] section |
|
559 | """A dictionary of repository locations defined in the [paths] section | |
560 | of your configuration file.""" |
|
560 | of your configuration file.""" | |
561 | repo = context.resource(mapping, 'repo') |
|
561 | repo = context.resource(mapping, 'repo') | |
562 | # see commands.paths() for naming of dictionary keys |
|
562 | # see commands.paths() for naming of dictionary keys | |
563 | paths = repo.ui.paths |
|
563 | paths = repo.ui.paths | |
564 | urls = util.sortdict((k, p.rawloc) for k, p in sorted(paths.iteritems())) |
|
564 | urls = util.sortdict((k, p.rawloc) for k, p in sorted(paths.iteritems())) | |
565 | def makemap(k): |
|
565 | def makemap(k): | |
566 | p = paths[k] |
|
566 | p = paths[k] | |
567 | d = {'name': k, 'url': p.rawloc} |
|
567 | d = {'name': k, 'url': p.rawloc} | |
568 | d.update((o, v) for o, v in sorted(p.suboptions.iteritems())) |
|
568 | d.update((o, v) for o, v in sorted(p.suboptions.iteritems())) | |
569 | return d |
|
569 | return d | |
570 | return _hybrid(None, urls, makemap, lambda k: '%s=%s' % (k, urls[k])) |
|
570 | return _hybrid(None, urls, makemap, lambda k: '%s=%s' % (k, urls[k])) | |
571 |
|
571 | |||
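# A hedged sketch; items expose 'name', 'url' and any path sub-options per
# makemap above, so this roughly mirrors `hg paths` output:
#
#     hg log -r . -T "{peerurls % '{name} = {url}\n'}"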
572 | @templatekeyword("predecessors", requires={'repo', 'ctx'}) |
|
572 | @templatekeyword("predecessors", requires={'repo', 'ctx'}) | |
573 | def showpredecessors(context, mapping): |
|
573 | def showpredecessors(context, mapping): | |
574 | """Returns the list if the closest visible successors. (EXPERIMENTAL)""" |
|
574 | """Returns the list if the closest visible successors. (EXPERIMENTAL)""" | |
575 | repo = context.resource(mapping, 'repo') |
|
575 | repo = context.resource(mapping, 'repo') | |
576 | ctx = context.resource(mapping, 'ctx') |
|
576 | ctx = context.resource(mapping, 'ctx') | |
577 | predecessors = sorted(obsutil.closestpredecessors(repo, ctx.node())) |
|
577 | predecessors = sorted(obsutil.closestpredecessors(repo, ctx.node())) | |
578 | predecessors = map(hex, predecessors) |
|
578 | predecessors = map(hex, predecessors) | |
579 |
|
579 | |||
580 | return _hybrid(None, predecessors, |
|
580 | return _hybrid(None, predecessors, | |
581 | lambda x: {'ctx': repo[x], 'revcache': {}}, |
|
581 | lambda x: {'ctx': repo[x], 'revcache': {}}, | |
582 | lambda x: scmutil.formatchangeid(repo[x])) |
|
582 | lambda x: scmutil.formatchangeid(repo[x])) | |
583 |
|
583 | |||
584 | @templatekeyword('reporoot', requires={'repo'}) |
|
584 | @templatekeyword('reporoot', requires={'repo'}) | |
585 | def showreporoot(context, mapping): |
|
585 | def showreporoot(context, mapping): | |
586 | """String. The root directory of the current repository.""" |
|
586 | """String. The root directory of the current repository.""" | |
587 | repo = context.resource(mapping, 'repo') |
|
587 | repo = context.resource(mapping, 'repo') | |
588 | return repo.root |
|
588 | return repo.root | |
589 |
|
589 | |||
590 | @templatekeyword("successorssets", requires={'repo', 'ctx'}) |
|
590 | @templatekeyword("successorssets", requires={'repo', 'ctx'}) | |
591 | def showsuccessorssets(context, mapping): |
|
591 | def showsuccessorssets(context, mapping): | |
592 | """Returns a string of sets of successors for a changectx. Format used |
|
592 | """Returns a string of sets of successors for a changectx. Format used | |
593 | is: [ctx1, ctx2], [ctx3] if ctx has been split into ctx1 and ctx2 |

593 | is: [ctx1, ctx2], [ctx3] if ctx has been split into ctx1 and ctx2 | |
594 | while also diverging into ctx3. (EXPERIMENTAL)""" |

594 | while also diverging into ctx3. (EXPERIMENTAL)""" | |
595 | repo = context.resource(mapping, 'repo') |
|
595 | repo = context.resource(mapping, 'repo') | |
596 | ctx = context.resource(mapping, 'ctx') |
|
596 | ctx = context.resource(mapping, 'ctx') | |
597 | if not ctx.obsolete(): |
|
597 | if not ctx.obsolete(): | |
598 | return '' |
|
598 | return '' | |
599 |
|
599 | |||
600 | ssets = obsutil.successorssets(repo, ctx.node(), closest=True) |
|
600 | ssets = obsutil.successorssets(repo, ctx.node(), closest=True) | |
601 | ssets = [[hex(n) for n in ss] for ss in ssets] |
|
601 | ssets = [[hex(n) for n in ss] for ss in ssets] | |
602 |
|
602 | |||
603 | data = [] |
|
603 | data = [] | |
604 | for ss in ssets: |
|
604 | for ss in ssets: | |
605 | h = _hybrid(None, ss, lambda x: {'ctx': repo[x], 'revcache': {}}, |
|
605 | h = _hybrid(None, ss, lambda x: {'ctx': repo[x], 'revcache': {}}, | |
606 | lambda x: scmutil.formatchangeid(repo[x])) |
|
606 | lambda x: scmutil.formatchangeid(repo[x])) | |
607 | data.append(h) |
|
607 | data.append(h) | |
608 |
|
608 | |||
609 | # Format the successorssets |
|
609 | # Format the successorssets | |
610 | def render(d): |
|
610 | def render(d): | |
611 | t = [] |
|
611 | t = [] | |
612 | for i in d.gen(): |
|
612 | for i in d.gen(): | |
613 | t.append(i) |
|
613 | t.append(i) | |
614 | return "".join(t) |
|
614 | return "".join(t) | |
615 |
|
615 | |||
616 | def gen(data): |
|
616 | def gen(data): | |
617 | yield "; ".join(render(d) for d in data) |
|
617 | yield "; ".join(render(d) for d in data) | |
618 |
|
618 | |||
619 | return _hybrid(gen(data), data, lambda x: {'successorset': x}, |
|
619 | return _hybrid(gen(data), data, lambda x: {'successorset': x}, | |
620 | pycompat.identity) |
|
620 | pycompat.identity) | |
621 |
|
621 | |||
622 | @templatekeyword("succsandmarkers", requires={'repo', 'ctx', 'templ'}) |
|
622 | @templatekeyword("succsandmarkers", requires={'repo', 'ctx', 'templ'}) | |
623 | def showsuccsandmarkers(context, mapping): |
|
623 | def showsuccsandmarkers(context, mapping): | |
624 | """Returns a list of dict for each final successor of ctx. The dict |
|
624 | """Returns a list of dict for each final successor of ctx. The dict | |
625 | contains successors node id in "successors" keys and the list of |
|
625 | contains successors node id in "successors" keys and the list of | |
626 | obs-markers from ctx to the set of successors in "markers". |
|
626 | obs-markers from ctx to the set of successors in "markers". | |
627 | (EXPERIMENTAL) |
|
627 | (EXPERIMENTAL) | |
628 | """ |
|
628 | """ | |
629 | repo = context.resource(mapping, 'repo') |
|
629 | repo = context.resource(mapping, 'repo') | |
630 | ctx = context.resource(mapping, 'ctx') |
|
630 | ctx = context.resource(mapping, 'ctx') | |
631 | templ = context.resource(mapping, 'templ') |
|
631 | templ = context.resource(mapping, 'templ') | |
632 |
|
632 | |||
633 | values = obsutil.successorsandmarkers(repo, ctx) |
|
633 | values = obsutil.successorsandmarkers(repo, ctx) | |
634 |
|
634 | |||
635 | if values is None: |
|
635 | if values is None: | |
636 | values = [] |
|
636 | values = [] | |
637 |
|
637 | |||
638 | # Format successors and markers to avoid exposing binary to templates |
|
638 | # Format successors and markers to avoid exposing binary to templates | |
639 | data = [] |
|
639 | data = [] | |
640 | for i in values: |
|
640 | for i in values: | |
641 | # Format successors |
|
641 | # Format successors | |
642 | successors = i['successors'] |
|
642 | successors = i['successors'] | |
643 |
|
643 | |||
644 | successors = [hex(n) for n in successors] |
|
644 | successors = [hex(n) for n in successors] | |
645 | successors = _hybrid(None, successors, |
|
645 | successors = _hybrid(None, successors, | |
646 | lambda x: {'ctx': repo[x], 'revcache': {}}, |
|
646 | lambda x: {'ctx': repo[x], 'revcache': {}}, | |
647 | lambda x: scmutil.formatchangeid(repo[x])) |
|
647 | lambda x: scmutil.formatchangeid(repo[x])) | |
648 |
|
648 | |||
649 | # Format markers |
|
649 | # Format markers | |
650 | finalmarkers = [] |
|
650 | finalmarkers = [] | |
651 | for m in i['markers']: |
|
651 | for m in i['markers']: | |
652 | hexprec = hex(m[0]) |
|
652 | hexprec = hex(m[0]) | |
653 | hexsucs = tuple(hex(n) for n in m[1]) |
|
653 | hexsucs = tuple(hex(n) for n in m[1]) | |
654 | hexparents = None |
|
654 | hexparents = None | |
655 | if m[5] is not None: |
|
655 | if m[5] is not None: | |
656 | hexparents = tuple(hex(n) for n in m[5]) |
|
656 | hexparents = tuple(hex(n) for n in m[5]) | |
657 | newmarker = (hexprec, hexsucs) + m[2:5] + (hexparents,) + m[6:] |
|
657 | newmarker = (hexprec, hexsucs) + m[2:5] + (hexparents,) + m[6:] | |
658 | finalmarkers.append(newmarker) |
|
658 | finalmarkers.append(newmarker) | |
659 |
|
659 | |||
660 | data.append({'successors': successors, 'markers': finalmarkers}) |
|
660 | data.append({'successors': successors, 'markers': finalmarkers}) | |
661 |
|
661 | |||
662 | f = _showlist('succsandmarkers', data, templ, mapping) |
|
662 | f = _showlist('succsandmarkers', data, templ, mapping) | |
663 | return _hybrid(f, data, lambda x: x, pycompat.identity) |
|
663 | return _hybrid(f, data, lambda x: x, pycompat.identity) | |
664 |
|
664 | |||
665 | @templatekeyword('p1rev', requires={'ctx'}) |
|
665 | @templatekeyword('p1rev', requires={'ctx'}) | |
666 | def showp1rev(context, mapping): |
|
666 | def showp1rev(context, mapping): | |
667 | """Integer. The repository-local revision number of the changeset's |
|
667 | """Integer. The repository-local revision number of the changeset's | |
668 | first parent, or -1 if the changeset has no parents.""" |
|
668 | first parent, or -1 if the changeset has no parents.""" | |
669 | ctx = context.resource(mapping, 'ctx') |
|
669 | ctx = context.resource(mapping, 'ctx') | |
670 | return ctx.p1().rev() |
|
670 | return ctx.p1().rev() | |
671 |
|
671 | |||
672 | @templatekeyword('p2rev', requires={'ctx'}) |
|
672 | @templatekeyword('p2rev', requires={'ctx'}) | |
673 | def showp2rev(context, mapping): |
|
673 | def showp2rev(context, mapping): | |
674 | """Integer. The repository-local revision number of the changeset's |
|
674 | """Integer. The repository-local revision number of the changeset's | |
675 | second parent, or -1 if the changeset has no second parent.""" |
|
675 | second parent, or -1 if the changeset has no second parent.""" | |
676 | ctx = context.resource(mapping, 'ctx') |
|
676 | ctx = context.resource(mapping, 'ctx') | |
677 | return ctx.p2().rev() |
|
677 | return ctx.p2().rev() | |
678 |
|
678 | |||
679 | @templatekeyword('p1node', requires={'ctx'}) |
|
679 | @templatekeyword('p1node', requires={'ctx'}) | |
680 | def showp1node(context, mapping): |
|
680 | def showp1node(context, mapping): | |
681 | """String. The identification hash of the changeset's first parent, |
|
681 | """String. The identification hash of the changeset's first parent, | |
682 | as a 40 digit hexadecimal string. If the changeset has no parents, all |
|
682 | as a 40 digit hexadecimal string. If the changeset has no parents, all | |
683 | digits are 0.""" |
|
683 | digits are 0.""" | |
684 | ctx = context.resource(mapping, 'ctx') |
|
684 | ctx = context.resource(mapping, 'ctx') | |
685 | return ctx.p1().hex() |
|
685 | return ctx.p1().hex() | |
686 |
|
686 | |||
687 | @templatekeyword('p2node', requires={'ctx'}) |
|
687 | @templatekeyword('p2node', requires={'ctx'}) | |
688 | def showp2node(context, mapping): |
|
688 | def showp2node(context, mapping): | |
689 | """String. The identification hash of the changeset's second |
|
689 | """String. The identification hash of the changeset's second | |
690 | parent, as a 40 digit hexadecimal string. If the changeset has no second |
|
690 | parent, as a 40 digit hexadecimal string. If the changeset has no second | |
691 | parent, all digits are 0.""" |
|
691 | parent, all digits are 0.""" | |
692 | ctx = context.resource(mapping, 'ctx') |
|
692 | ctx = context.resource(mapping, 'ctx') | |
693 | return ctx.p2().hex() |
|
693 | return ctx.p2().hex() | |
694 |
|
694 | |||
695 | @templatekeyword('parents', requires={'repo', 'ctx', 'templ'}) |
|
695 | @templatekeyword('parents', requires={'repo', 'ctx', 'templ'}) | |
696 | def showparents(context, mapping): |
|
696 | def showparents(context, mapping): | |
697 | """List of strings. The parents of the changeset in "rev:node" |
|
697 | """List of strings. The parents of the changeset in "rev:node" | |
698 | format. If the changeset has only one "natural" parent (the predecessor |
|
698 | format. If the changeset has only one "natural" parent (the predecessor | |
699 | revision) nothing is shown.""" |
|
699 | revision) nothing is shown.""" | |
700 | repo = context.resource(mapping, 'repo') |
|
700 | repo = context.resource(mapping, 'repo') | |
701 | ctx = context.resource(mapping, 'ctx') |
|
701 | ctx = context.resource(mapping, 'ctx') | |
702 | templ = context.resource(mapping, 'templ') |
|
702 | templ = context.resource(mapping, 'templ') | |
703 | pctxs = scmutil.meaningfulparents(repo, ctx) |
|
703 | pctxs = scmutil.meaningfulparents(repo, ctx) | |
704 | prevs = [p.rev() for p in pctxs] |
|
704 | prevs = [p.rev() for p in pctxs] | |
705 | parents = [[('rev', p.rev()), |
|
705 | parents = [[('rev', p.rev()), | |
706 | ('node', p.hex()), |
|
706 | ('node', p.hex()), | |
707 | ('phase', p.phasestr())] |
|
707 | ('phase', p.phasestr())] | |
708 | for p in pctxs] |
|
708 | for p in pctxs] | |
709 | f = _showlist('parent', parents, templ, mapping) |
|
709 | f = _showlist('parent', parents, templ, mapping) | |
710 | return _hybrid(f, prevs, lambda x: {'ctx': repo[x], 'revcache': {}}, |
|
710 | return _hybrid(f, prevs, lambda x: {'ctx': repo[x], 'revcache': {}}, | |
711 | lambda x: scmutil.formatchangeid(repo[x]), keytype=int) |
|
711 | lambda x: scmutil.formatchangeid(repo[x]), keytype=int) | |
712 |
|
712 | |||
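# A hedged template sketch; each parent item carries 'rev', 'node' and 'phase'
# as set up above. Note the list is empty when the changeset has only its one
# "natural" parent, per scmutil.meaningfulparents():
#
#     hg log -r . -T "{parents % '{rev}:{node|short} '}\n"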
713 | @templatekeyword('phase', requires={'ctx'}) |
|
713 | @templatekeyword('phase', requires={'ctx'}) | |
714 | def showphase(context, mapping): |
|
714 | def showphase(context, mapping): | |
715 | """String. The changeset phase name.""" |
|
715 | """String. The changeset phase name.""" | |
716 | ctx = context.resource(mapping, 'ctx') |
|
716 | ctx = context.resource(mapping, 'ctx') | |
717 | return ctx.phasestr() |
|
717 | return ctx.phasestr() | |
718 |
|
718 | |||
719 | @templatekeyword('phaseidx', requires={'ctx'}) |
|
719 | @templatekeyword('phaseidx', requires={'ctx'}) | |
720 | def showphaseidx(context, mapping): |
|
720 | def showphaseidx(context, mapping): | |
721 | """Integer. The changeset phase index. (ADVANCED)""" |
|
721 | """Integer. The changeset phase index. (ADVANCED)""" | |
722 | ctx = context.resource(mapping, 'ctx') |
|
722 | ctx = context.resource(mapping, 'ctx') | |
723 | return ctx.phase() |
|
723 | return ctx.phase() | |
724 |
|
724 | |||
725 | @templatekeyword('rev', requires={'ctx'}) |
|
725 | @templatekeyword('rev', requires={'ctx'}) | |
726 | def showrev(context, mapping): |
|
726 | def showrev(context, mapping): | |
727 | """Integer. The repository-local changeset revision number.""" |
|
727 | """Integer. The repository-local changeset revision number.""" | |
728 | ctx = context.resource(mapping, 'ctx') |
|
728 | ctx = context.resource(mapping, 'ctx') | |
729 | return scmutil.intrev(ctx) |
|
729 | return scmutil.intrev(ctx) | |
730 |
|
730 | |||
731 | def showrevslist(context, mapping, name, revs): |
|
731 | def showrevslist(context, mapping, name, revs): | |
732 | """helper to generate a list of revisions in which a mapped template will |
|
732 | """helper to generate a list of revisions in which a mapped template will | |
733 | be evaluated""" |
|
733 | be evaluated""" | |
734 | repo = context.resource(mapping, 'repo') |
|
734 | repo = context.resource(mapping, 'repo') | |
735 | templ = context.resource(mapping, 'templ') |
|
735 | templ = context.resource(mapping, 'templ') | |
736 | f = _showlist(name, ['%d' % r for r in revs], templ, mapping) |
|
736 | f = _showlist(name, ['%d' % r for r in revs], templ, mapping) | |
737 | return _hybrid(f, revs, |
|
737 | return _hybrid(f, revs, | |
738 | lambda x: {name: x, 'ctx': repo[x], 'revcache': {}}, |
|
738 | lambda x: {name: x, 'ctx': repo[x], 'revcache': {}}, | |
739 | pycompat.identity, keytype=int) |
|
739 | pycompat.identity, keytype=int) | |
740 |
|
740 | |||
741 | @templatekeyword('subrepos', requires={'ctx', 'templ'}) |
|
741 | @templatekeyword('subrepos', requires={'ctx', 'templ'}) | |
742 | def showsubrepos(context, mapping): |
|
742 | def showsubrepos(context, mapping): | |
743 | """List of strings. Updated subrepositories in the changeset.""" |
|
743 | """List of strings. Updated subrepositories in the changeset.""" | |
744 | ctx = context.resource(mapping, 'ctx') |
|
744 | ctx = context.resource(mapping, 'ctx') | |
745 | substate = ctx.substate |
|
745 | substate = ctx.substate | |
746 | if not substate: |
|
746 | if not substate: | |
747 | return compatlist(context, mapping, 'subrepo', []) |
|
747 | return compatlist(context, mapping, 'subrepo', []) | |
748 | psubstate = ctx.parents()[0].substate or {} |
|
748 | psubstate = ctx.parents()[0].substate or {} | |
749 | subrepos = [] |
|
749 | subrepos = [] | |
750 | for sub in substate: |
|
750 | for sub in substate: | |
751 | if sub not in psubstate or substate[sub] != psubstate[sub]: |
|
751 | if sub not in psubstate or substate[sub] != psubstate[sub]: | |
752 | subrepos.append(sub) # modified or newly added in ctx |
|
752 | subrepos.append(sub) # modified or newly added in ctx | |
753 | for sub in psubstate: |
|
753 | for sub in psubstate: | |
754 | if sub not in substate: |
|
754 | if sub not in substate: | |
755 | subrepos.append(sub) # removed in ctx |
|
755 | subrepos.append(sub) # removed in ctx | |
756 | return compatlist(context, mapping, 'subrepo', sorted(subrepos)) |
|
756 | return compatlist(context, mapping, 'subrepo', sorted(subrepos)) | |
757 |
|
757 | |||
758 | # don't remove "showtags" definition, even though namespaces will put |
|
758 | # don't remove "showtags" definition, even though namespaces will put | |
759 | # a helper function for "tags" keyword into "keywords" map automatically, |
|
759 | # a helper function for "tags" keyword into "keywords" map automatically, | |
760 | # because online help text is built without namespaces initialization |
|
760 | # because online help text is built without namespaces initialization | |
761 | @templatekeyword('tags', requires={'repo', 'ctx', 'templ'}) |
|
761 | @templatekeyword('tags', requires={'repo', 'ctx', 'templ'}) | |
762 | def showtags(context, mapping): |
|
762 | def showtags(context, mapping): | |
763 | """List of strings. Any tags associated with the changeset.""" |
|
763 | """List of strings. Any tags associated with the changeset.""" | |
764 | return shownames(context, mapping, 'tags') |
|
764 | return shownames(context, mapping, 'tags') | |
765 |
|
765 | |||
766 | @templatekeyword('termwidth', requires={'ui'}) |
|
766 | @templatekeyword('termwidth', requires={'ui'}) | |
767 | def showtermwidth(context, mapping): |
|
767 | def showtermwidth(context, mapping): | |
768 | """Integer. The width of the current terminal.""" |
|
768 | """Integer. The width of the current terminal.""" | |
769 | ui = context.resource(mapping, 'ui') |
|
769 | ui = context.resource(mapping, 'ui') | |
770 | return ui.termwidth() |
|
770 | return ui.termwidth() | |
771 |
|
771 | |||
772 | @templatekeyword('instabilities', requires={'ctx', 'templ'}) |
|
772 | @templatekeyword('instabilities', requires={'ctx', 'templ'}) | |
773 | def showinstabilities(context, mapping): |
|
773 | def showinstabilities(context, mapping): | |
774 | """List of strings. Evolution instabilities affecting the changeset. |
|
774 | """List of strings. Evolution instabilities affecting the changeset. | |
775 | (EXPERIMENTAL) |
|
775 | (EXPERIMENTAL) | |
776 | """ |
|
776 | """ | |
777 | ctx = context.resource(mapping, 'ctx') |
|
777 | ctx = context.resource(mapping, 'ctx') | |
778 | return compatlist(context, mapping, 'instability', ctx.instabilities(), |
|
778 | return compatlist(context, mapping, 'instability', ctx.instabilities(), | |
779 | plural='instabilities') |
|
779 | plural='instabilities') | |
780 |
|
780 | |||
781 | @templatekeyword('verbosity', requires={'ui'}) |
|
781 | @templatekeyword('verbosity', requires={'ui'}) | |
782 | def showverbosity(context, mapping): |
|
782 | def showverbosity(context, mapping): | |
783 | """String. The current output verbosity in 'debug', 'quiet', 'verbose', |
|
783 | """String. The current output verbosity in 'debug', 'quiet', 'verbose', | |
784 | or ''.""" |
|
784 | or ''.""" | |
785 | ui = context.resource(mapping, 'ui') |
|
785 | ui = context.resource(mapping, 'ui') | |
786 | # see logcmdutil.changesettemplater for priority of these flags |
|
786 | # see logcmdutil.changesettemplater for priority of these flags | |
787 | if ui.debugflag: |
|
787 | if ui.debugflag: | |
788 | return 'debug' |
|
788 | return 'debug' | |
789 | elif ui.quiet: |
|
789 | elif ui.quiet: | |
790 | return 'quiet' |
|
790 | return 'quiet' | |
791 | elif ui.verbose: |
|
791 | elif ui.verbose: | |
792 | return 'verbose' |
|
792 | return 'verbose' | |
793 | return '' |
|
793 | return '' | |
794 |
|
794 | |||
795 | def loadkeyword(ui, extname, registrarobj): |
|
795 | def loadkeyword(ui, extname, registrarobj): | |
796 | """Load template keyword from specified registrarobj |
|
796 | """Load template keyword from specified registrarobj | |
797 | """ |
|
797 | """ | |
798 | for name, func in registrarobj._table.iteritems(): |
|
798 | for name, func in registrarobj._table.iteritems(): | |
799 | keywords[name] = func |
|
799 | keywords[name] = func | |
800 |
|
800 | |||
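
loadkeyword() is the hook that lets extensions contribute template keywords: an extension fills its own registrar table and the entries are merged into the keywords map at load time. A hedged sketch of such an extension, assuming the same registrar API used by this file (the keyword name 'summaryupper' and its behavior are hypothetical):

    # Hypothetical extension module; loadkeyword() merges its table into
    # the keywords map when the extension is loaded.
    from mercurial import registrar

    templatekeyword = registrar.templatekeyword()

    @templatekeyword('summaryupper', requires={'ctx'})
    def showsummaryupper(context, mapping):
        """String. First line of the changeset description, upper-cased."""
        ctx = context.resource(mapping, 'ctx')
        desc = ctx.description()
        return desc.splitlines()[0].upper() if desc else ''
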
801 | # tell hggettext to extract docstrings from these functions: |
|
801 | # tell hggettext to extract docstrings from these functions: | |
802 | i18nfunctions = keywords.values() |
|
802 | i18nfunctions = keywords.values() |
@@ -1,842 +1,842 b'' | |||||
1 | # templater.py - template expansion for output |
|
1 | # templater.py - template expansion for output | |
2 | # |
|
2 | # | |
3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | """Slightly complicated template engine for commands and hgweb |
|
8 | """Slightly complicated template engine for commands and hgweb | |
9 |
|
9 | |||
10 | This module provides a low-level interface to the template engine. See the |

10 | This module provides a low-level interface to the template engine. See the | |
11 | formatter and cmdutil modules if you are looking for high-level functions |
|
11 | formatter and cmdutil modules if you are looking for high-level functions | |
12 | such as ``cmdutil.rendertemplate(ctx, tmpl)``. |
|
12 | such as ``cmdutil.rendertemplate(ctx, tmpl)``. | |
13 |
|
13 | |||
14 | Internal Data Types |
|
14 | Internal Data Types | |
15 | ------------------- |
|
15 | ------------------- | |
16 |
|
16 | |||
17 | Template keywords and functions take a dictionary of current symbols and |
|
17 | Template keywords and functions take a dictionary of current symbols and | |
18 | resources (a "mapping") and return result. Inputs and outputs must be one |
|
18 | resources (a "mapping") and return result. Inputs and outputs must be one | |
19 | of the following data types: |
|
19 | of the following data types: | |
20 |
|
20 | |||
21 | bytes |
|
21 | bytes | |
22 | a byte string, which is generally a human-readable text in local encoding. |
|
22 | a byte string, which is generally a human-readable text in local encoding. | |
23 |
|
23 | |||
24 | generator |
|
24 | generator | |
25 | a lazily-evaluated byte string, which is a possibly nested generator of |
|
25 | a lazily-evaluated byte string, which is a possibly nested generator of | |
26 | values of any printable types, and will be folded by ``stringify()`` |
|
26 | values of any printable types, and will be folded by ``stringify()`` | |
27 | or ``flatten()``. |
|
27 | or ``flatten()``. | |
28 |
|
28 | |||
29 | BUG: hgweb overloads this type for mappings (i.e. some hgweb keywords |
|
29 | BUG: hgweb overloads this type for mappings (i.e. some hgweb keywords | |
30 | return a generator of dicts.) |

30 | return a generator of dicts.) | |
31 |
|
31 | |||
32 | None |
|
32 | None | |
33 | sometimes represents an empty value, which can be stringified to ''. |
|
33 | sometimes represents an empty value, which can be stringified to ''. | |
34 |
|
34 | |||
35 | True, False, int, float |
|
35 | True, False, int, float | |
36 | can be stringified as such. |
|
36 | can be stringified as such. | |
37 |
|
37 | |||
38 | date tuple |
|
38 | date tuple | |
39 | a (unixtime, offset) tuple, which produces no meaningful output by itself. |
|
39 | a (unixtime, offset) tuple, which produces no meaningful output by itself. | |
40 |
|
40 | |||
41 | hybrid |
|
41 | hybrid | |
42 | represents a list/dict of printable values, which can also be converted |
|
42 | represents a list/dict of printable values, which can also be converted | |
43 | to mappings by % operator. |
|
43 | to mappings by % operator. | |
44 |
|
44 | |||
45 | mappable |
|
45 | mappable | |
46 | represents a scalar printable value, also supports % operator. |
|
46 | represents a scalar printable value, also supports % operator. | |
47 | """ |
|
47 | """ | |
48 |
|
48 | |||
49 | from __future__ import absolute_import, print_function |
|
49 | from __future__ import absolute_import, print_function | |
50 |
|
50 | |||
51 | import os |
|
51 | import os | |
52 |
|
52 | |||
53 | from .i18n import _ |
|
53 | from .i18n import _ | |
54 | from . import ( |
|
54 | from . import ( | |
55 | config, |
|
55 | config, | |
56 | encoding, |
|
56 | encoding, | |
57 | error, |
|
57 | error, | |
58 | parser, |
|
58 | parser, | |
59 | pycompat, |
|
59 | pycompat, | |
60 | templatefilters, |
|
60 | templatefilters, | |
61 | templatefuncs, |
|
61 | templatefuncs, | |
62 | templateutil, |
|
62 | templateutil, | |
63 | util, |
|
63 | util, | |
64 | ) |
|
64 | ) | |
65 |
|
65 | |||
66 | # template parsing |
|
66 | # template parsing | |
67 |
|
67 | |||
68 | elements = { |
|
68 | elements = { | |
69 | # token-type: binding-strength, primary, prefix, infix, suffix |
|
69 | # token-type: binding-strength, primary, prefix, infix, suffix | |
70 | "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None), |
|
70 | "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None), | |
71 | ".": (18, None, None, (".", 18), None), |
|
71 | ".": (18, None, None, (".", 18), None), | |
72 | "%": (15, None, None, ("%", 15), None), |
|
72 | "%": (15, None, None, ("%", 15), None), | |
73 | "|": (15, None, None, ("|", 15), None), |
|
73 | "|": (15, None, None, ("|", 15), None), | |
74 | "*": (5, None, None, ("*", 5), None), |
|
74 | "*": (5, None, None, ("*", 5), None), | |
75 | "/": (5, None, None, ("/", 5), None), |
|
75 | "/": (5, None, None, ("/", 5), None), | |
76 | "+": (4, None, None, ("+", 4), None), |
|
76 | "+": (4, None, None, ("+", 4), None), | |
77 | "-": (4, None, ("negate", 19), ("-", 4), None), |
|
77 | "-": (4, None, ("negate", 19), ("-", 4), None), | |
78 | "=": (3, None, None, ("keyvalue", 3), None), |
|
78 | "=": (3, None, None, ("keyvalue", 3), None), | |
79 | ",": (2, None, None, ("list", 2), None), |
|
79 | ",": (2, None, None, ("list", 2), None), | |
80 | ")": (0, None, None, None, None), |
|
80 | ")": (0, None, None, None, None), | |
81 | "integer": (0, "integer", None, None, None), |
|
81 | "integer": (0, "integer", None, None, None), | |
82 | "symbol": (0, "symbol", None, None, None), |
|
82 | "symbol": (0, "symbol", None, None, None), | |
83 | "string": (0, "string", None, None, None), |
|
83 | "string": (0, "string", None, None, None), | |
84 | "template": (0, "template", None, None, None), |
|
84 | "template": (0, "template", None, None, None), | |
85 | "end": (0, None, None, None, None), |
|
85 | "end": (0, None, None, None, None), | |
86 | } |
|
86 | } | |
87 |
|
87 | |||
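
The binding strengths above give the expected precedence; for example '*' (5) binds tighter than '+' (4). A rough illustration, assuming mercurial.templater is importable (the printed tree is only approximate):

    from mercurial import templater

    print(templater.prettyformat(templater._parseexpr(b'1 + 2 * 3')))
    # roughly:
    # (+
    #   (integer '1')
    #   (*
    #     (integer '2')
    #     (integer '3')))
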
88 | def tokenize(program, start, end, term=None): |
|
88 | def tokenize(program, start, end, term=None): | |
89 | """Parse a template expression into a stream of tokens, which must end |
|
89 | """Parse a template expression into a stream of tokens, which must end | |
90 | with term if specified""" |
|
90 | with term if specified""" | |
91 | pos = start |
|
91 | pos = start | |
92 | program = pycompat.bytestr(program) |
|
92 | program = pycompat.bytestr(program) | |
93 | while pos < end: |
|
93 | while pos < end: | |
94 | c = program[pos] |
|
94 | c = program[pos] | |
95 | if c.isspace(): # skip inter-token whitespace |
|
95 | if c.isspace(): # skip inter-token whitespace | |
96 | pass |
|
96 | pass | |
97 | elif c in "(=,).%|+-*/": # handle simple operators |
|
97 | elif c in "(=,).%|+-*/": # handle simple operators | |
98 | yield (c, None, pos) |
|
98 | yield (c, None, pos) | |
99 | elif c in '"\'': # handle quoted templates |
|
99 | elif c in '"\'': # handle quoted templates | |
100 | s = pos + 1 |
|
100 | s = pos + 1 | |
101 | data, pos = _parsetemplate(program, s, end, c) |
|
101 | data, pos = _parsetemplate(program, s, end, c) | |
102 | yield ('template', data, s) |
|
102 | yield ('template', data, s) | |
103 | pos -= 1 |
|
103 | pos -= 1 | |
104 | elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'): |
|
104 | elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'): | |
105 | # handle quoted strings |
|
105 | # handle quoted strings | |
106 | c = program[pos + 1] |
|
106 | c = program[pos + 1] | |
107 | s = pos = pos + 2 |
|
107 | s = pos = pos + 2 | |
108 | while pos < end: # find closing quote |
|
108 | while pos < end: # find closing quote | |
109 | d = program[pos] |
|
109 | d = program[pos] | |
110 | if d == '\\': # skip over escaped characters |
|
110 | if d == '\\': # skip over escaped characters | |
111 | pos += 2 |
|
111 | pos += 2 | |
112 | continue |
|
112 | continue | |
113 | if d == c: |
|
113 | if d == c: | |
114 | yield ('string', program[s:pos], s) |
|
114 | yield ('string', program[s:pos], s) | |
115 | break |
|
115 | break | |
116 | pos += 1 |
|
116 | pos += 1 | |
117 | else: |
|
117 | else: | |
118 | raise error.ParseError(_("unterminated string"), s) |
|
118 | raise error.ParseError(_("unterminated string"), s) | |
119 | elif c.isdigit(): |
|
119 | elif c.isdigit(): | |
120 | s = pos |
|
120 | s = pos | |
121 | while pos < end: |
|
121 | while pos < end: | |
122 | d = program[pos] |
|
122 | d = program[pos] | |
123 | if not d.isdigit(): |
|
123 | if not d.isdigit(): | |
124 | break |
|
124 | break | |
125 | pos += 1 |
|
125 | pos += 1 | |
126 | yield ('integer', program[s:pos], s) |
|
126 | yield ('integer', program[s:pos], s) | |
127 | pos -= 1 |
|
127 | pos -= 1 | |
128 | elif (c == '\\' and program[pos:pos + 2] in (br"\'", br'\"') |
|
128 | elif (c == '\\' and program[pos:pos + 2] in (br"\'", br'\"') | |
129 | or c == 'r' and program[pos:pos + 3] in (br"r\'", br'r\"')): |
|
129 | or c == 'r' and program[pos:pos + 3] in (br"r\'", br'r\"')): | |
130 | # handle escaped quoted strings for compatibility with 2.9.2-3.4, |
|
130 | # handle escaped quoted strings for compatibility with 2.9.2-3.4, | |
131 | # where some nested templates were preprocessed as strings and |

131 | # where some nested templates were preprocessed as strings and | |
132 | # then compiled. therefore, \"...\" was allowed. (issue4733) |
|
132 | # then compiled. therefore, \"...\" was allowed. (issue4733) | |
133 | # |
|
133 | # | |
134 | # processing flow of _evalifliteral() at 5ab28a2e9962: |
|
134 | # processing flow of _evalifliteral() at 5ab28a2e9962: | |
135 | # outer template string -> stringify() -> compiletemplate() |
|
135 | # outer template string -> stringify() -> compiletemplate() | |
136 | # ------------------------ ------------ ------------------ |
|
136 | # ------------------------ ------------ ------------------ | |
137 | # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}] |
|
137 | # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}] | |
138 | # ~~~~~~~~ |
|
138 | # ~~~~~~~~ | |
139 | # escaped quoted string |
|
139 | # escaped quoted string | |
140 | if c == 'r': |
|
140 | if c == 'r': | |
141 | pos += 1 |
|
141 | pos += 1 | |
142 | token = 'string' |
|
142 | token = 'string' | |
143 | else: |
|
143 | else: | |
144 | token = 'template' |
|
144 | token = 'template' | |
145 | quote = program[pos:pos + 2] |
|
145 | quote = program[pos:pos + 2] | |
146 | s = pos = pos + 2 |
|
146 | s = pos = pos + 2 | |
147 | while pos < end: # find closing escaped quote |
|
147 | while pos < end: # find closing escaped quote | |
148 | if program.startswith('\\\\\\', pos, end): |
|
148 | if program.startswith('\\\\\\', pos, end): | |
149 | pos += 4 # skip over double escaped characters |
|
149 | pos += 4 # skip over double escaped characters | |
150 | continue |
|
150 | continue | |
151 | if program.startswith(quote, pos, end): |
|
151 | if program.startswith(quote, pos, end): | |
152 | # interpret as if it were a part of an outer string |
|
152 | # interpret as if it were a part of an outer string | |
153 | data = parser.unescapestr(program[s:pos]) |
|
153 | data = parser.unescapestr(program[s:pos]) | |
154 | if token == 'template': |
|
154 | if token == 'template': | |
155 | data = _parsetemplate(data, 0, len(data))[0] |
|
155 | data = _parsetemplate(data, 0, len(data))[0] | |
156 | yield (token, data, s) |
|
156 | yield (token, data, s) | |
157 | pos += 1 |
|
157 | pos += 1 | |
158 | break |
|
158 | break | |
159 | pos += 1 |
|
159 | pos += 1 | |
160 | else: |
|
160 | else: | |
161 | raise error.ParseError(_("unterminated string"), s) |
|
161 | raise error.ParseError(_("unterminated string"), s) | |
162 | elif c.isalnum() or c in '_': |
|
162 | elif c.isalnum() or c in '_': | |
163 | s = pos |
|
163 | s = pos | |
164 | pos += 1 |
|
164 | pos += 1 | |
165 | while pos < end: # find end of symbol |
|
165 | while pos < end: # find end of symbol | |
166 | d = program[pos] |
|
166 | d = program[pos] | |
167 | if not (d.isalnum() or d == "_"): |
|
167 | if not (d.isalnum() or d == "_"): | |
168 | break |
|
168 | break | |
169 | pos += 1 |
|
169 | pos += 1 | |
170 | sym = program[s:pos] |
|
170 | sym = program[s:pos] | |
171 | yield ('symbol', sym, s) |
|
171 | yield ('symbol', sym, s) | |
172 | pos -= 1 |
|
172 | pos -= 1 | |
173 | elif c == term: |
|
173 | elif c == term: | |
174 | yield ('end', None, pos) |
|
174 | yield ('end', None, pos) | |
175 | return |
|
175 | return | |
176 | else: |
|
176 | else: | |
177 | raise error.ParseError(_("syntax error"), pos) |
|
177 | raise error.ParseError(_("syntax error"), pos) | |
178 | pos += 1 |
|
178 | pos += 1 | |
179 | if term: |
|
179 | if term: | |
180 | raise error.ParseError(_("unterminated template expansion"), start) |
|
180 | raise error.ParseError(_("unterminated template expansion"), start) | |
181 | yield ('end', None, pos) |
|
181 | yield ('end', None, pos) | |
182 |
|
182 | |||
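
A rough usage sketch of the tokenizer, assuming mercurial.templater is importable: every token is a (type, value, position) tuple and the stream is terminated by an 'end' token.

    from mercurial import templater

    expr = b'word(0, desc)'
    for typ, val, pos in templater.tokenize(expr, 0, len(expr)):
        print(typ, val, pos)
    # yields tokens such as ('symbol', 'word', 0), ('(', None, 4), ...
    # and finally ('end', None, 13)
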
183 | def _parsetemplate(tmpl, start, stop, quote=''): |
|
183 | def _parsetemplate(tmpl, start, stop, quote=''): | |
184 | r""" |
|
184 | r""" | |
185 | >>> _parsetemplate(b'foo{bar}"baz', 0, 12) |
|
185 | >>> _parsetemplate(b'foo{bar}"baz', 0, 12) | |
186 | ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12) |
|
186 | ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12) | |
187 | >>> _parsetemplate(b'foo{bar}"baz', 0, 12, quote=b'"') |
|
187 | >>> _parsetemplate(b'foo{bar}"baz', 0, 12, quote=b'"') | |
188 | ([('string', 'foo'), ('symbol', 'bar')], 9) |
|
188 | ([('string', 'foo'), ('symbol', 'bar')], 9) | |
189 | >>> _parsetemplate(b'foo"{bar}', 0, 9, quote=b'"') |
|
189 | >>> _parsetemplate(b'foo"{bar}', 0, 9, quote=b'"') | |
190 | ([('string', 'foo')], 4) |
|
190 | ([('string', 'foo')], 4) | |
191 | >>> _parsetemplate(br'foo\"bar"baz', 0, 12, quote=b'"') |
|
191 | >>> _parsetemplate(br'foo\"bar"baz', 0, 12, quote=b'"') | |
192 | ([('string', 'foo"'), ('string', 'bar')], 9) |
|
192 | ([('string', 'foo"'), ('string', 'bar')], 9) | |
193 | >>> _parsetemplate(br'foo\\"bar', 0, 10, quote=b'"') |
|
193 | >>> _parsetemplate(br'foo\\"bar', 0, 10, quote=b'"') | |
194 | ([('string', 'foo\\')], 6) |
|
194 | ([('string', 'foo\\')], 6) | |
195 | """ |
|
195 | """ | |
196 | parsed = [] |
|
196 | parsed = [] | |
197 | for typ, val, pos in _scantemplate(tmpl, start, stop, quote): |
|
197 | for typ, val, pos in _scantemplate(tmpl, start, stop, quote): | |
198 | if typ == 'string': |
|
198 | if typ == 'string': | |
199 | parsed.append((typ, val)) |
|
199 | parsed.append((typ, val)) | |
200 | elif typ == 'template': |
|
200 | elif typ == 'template': | |
201 | parsed.append(val) |
|
201 | parsed.append(val) | |
202 | elif typ == 'end': |
|
202 | elif typ == 'end': | |
203 | return parsed, pos |
|
203 | return parsed, pos | |
204 | else: |
|
204 | else: | |
205 | raise error.ProgrammingError('unexpected type: %s' % typ) |
|
205 | raise error.ProgrammingError('unexpected type: %s' % typ) | |
206 | raise error.ProgrammingError('unterminated scanning of template') |
|
206 | raise error.ProgrammingError('unterminated scanning of template') | |
207 |
|
207 | |||
208 | def scantemplate(tmpl, raw=False): |
|
208 | def scantemplate(tmpl, raw=False): | |
209 | r"""Scan (type, start, end) positions of outermost elements in template |
|
209 | r"""Scan (type, start, end) positions of outermost elements in template | |
210 |
|
210 | |||
211 | If raw=True, a backslash is not taken as an escape character just like |
|
211 | If raw=True, a backslash is not taken as an escape character just like | |
212 | r'' string in Python. Note that this is different from r'' literal in |
|
212 | r'' string in Python. Note that this is different from r'' literal in | |
213 | template in that no template fragment can appear in r'', e.g. r'{foo}' |
|
213 | template in that no template fragment can appear in r'', e.g. r'{foo}' | |
214 | is a literal '{foo}', but ('{foo}', raw=True) is a template expression |
|
214 | is a literal '{foo}', but ('{foo}', raw=True) is a template expression | |
215 | 'foo'. |
|
215 | 'foo'. | |
216 |
|
216 | |||
217 | >>> list(scantemplate(b'foo{bar}"baz')) |
|
217 | >>> list(scantemplate(b'foo{bar}"baz')) | |
218 | [('string', 0, 3), ('template', 3, 8), ('string', 8, 12)] |
|
218 | [('string', 0, 3), ('template', 3, 8), ('string', 8, 12)] | |
219 | >>> list(scantemplate(b'outer{"inner"}outer')) |
|
219 | >>> list(scantemplate(b'outer{"inner"}outer')) | |
220 | [('string', 0, 5), ('template', 5, 14), ('string', 14, 19)] |
|
220 | [('string', 0, 5), ('template', 5, 14), ('string', 14, 19)] | |
221 | >>> list(scantemplate(b'foo\\{escaped}')) |
|
221 | >>> list(scantemplate(b'foo\\{escaped}')) | |
222 | [('string', 0, 5), ('string', 5, 13)] |
|
222 | [('string', 0, 5), ('string', 5, 13)] | |
223 | >>> list(scantemplate(b'foo\\{escaped}', raw=True)) |
|
223 | >>> list(scantemplate(b'foo\\{escaped}', raw=True)) | |
224 | [('string', 0, 4), ('template', 4, 13)] |
|
224 | [('string', 0, 4), ('template', 4, 13)] | |
225 | """ |
|
225 | """ | |
226 | last = None |
|
226 | last = None | |
227 | for typ, val, pos in _scantemplate(tmpl, 0, len(tmpl), raw=raw): |
|
227 | for typ, val, pos in _scantemplate(tmpl, 0, len(tmpl), raw=raw): | |
228 | if last: |
|
228 | if last: | |
229 | yield last + (pos,) |
|
229 | yield last + (pos,) | |
230 | if typ == 'end': |
|
230 | if typ == 'end': | |
231 | return |
|
231 | return | |
232 | else: |
|
232 | else: | |
233 | last = (typ, pos) |
|
233 | last = (typ, pos) | |
234 | raise error.ProgrammingError('unterminated scanning of template') |
|
234 | raise error.ProgrammingError('unterminated scanning of template') | |
235 |
|
235 | |||
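
A rough usage sketch, assuming mercurial.templater is importable: because the (type, start, end) triples cover the whole input, they can be used to slice a template, e.g. for syntax highlighting or linting.

    from mercurial import templater

    tmpl = b'{rev} {desc|firstline}\n'
    for typ, start, end in templater.scantemplate(tmpl):
        print(typ, tmpl[start:end])
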
236 | def _scantemplate(tmpl, start, stop, quote='', raw=False): |
|
236 | def _scantemplate(tmpl, start, stop, quote='', raw=False): | |
237 | """Parse template string into chunks of strings and template expressions""" |
|
237 | """Parse template string into chunks of strings and template expressions""" | |
238 | sepchars = '{' + quote |
|
238 | sepchars = '{' + quote | |
239 | unescape = [parser.unescapestr, pycompat.identity][raw] |
|
239 | unescape = [parser.unescapestr, pycompat.identity][raw] | |
240 | pos = start |
|
240 | pos = start | |
241 | p = parser.parser(elements) |
|
241 | p = parser.parser(elements) | |
242 | try: |
|
242 | try: | |
243 | while pos < stop: |
|
243 | while pos < stop: | |
244 | n = min((tmpl.find(c, pos, stop) for c in sepchars), |
|
244 | n = min((tmpl.find(c, pos, stop) for c in sepchars), | |
245 | key=lambda n: (n < 0, n)) |
|
245 | key=lambda n: (n < 0, n)) | |
246 | if n < 0: |
|
246 | if n < 0: | |
247 | yield ('string', unescape(tmpl[pos:stop]), pos) |
|
247 | yield ('string', unescape(tmpl[pos:stop]), pos) | |
248 | pos = stop |
|
248 | pos = stop | |
249 | break |
|
249 | break | |
250 | c = tmpl[n:n + 1] |
|
250 | c = tmpl[n:n + 1] | |
251 | bs = 0 # count leading backslashes |
|
251 | bs = 0 # count leading backslashes | |
252 | if not raw: |
|
252 | if not raw: | |
253 | bs = (n - pos) - len(tmpl[pos:n].rstrip('\\')) |
|
253 | bs = (n - pos) - len(tmpl[pos:n].rstrip('\\')) | |
254 | if bs % 2 == 1: |
|
254 | if bs % 2 == 1: | |
255 | # escaped (e.g. '\{', '\\\{', but not '\\{') |
|
255 | # escaped (e.g. '\{', '\\\{', but not '\\{') | |
256 | yield ('string', unescape(tmpl[pos:n - 1]) + c, pos) |
|
256 | yield ('string', unescape(tmpl[pos:n - 1]) + c, pos) | |
257 | pos = n + 1 |
|
257 | pos = n + 1 | |
258 | continue |
|
258 | continue | |
259 | if n > pos: |
|
259 | if n > pos: | |
260 | yield ('string', unescape(tmpl[pos:n]), pos) |
|
260 | yield ('string', unescape(tmpl[pos:n]), pos) | |
261 | if c == quote: |
|
261 | if c == quote: | |
262 | yield ('end', None, n + 1) |
|
262 | yield ('end', None, n + 1) | |
263 | return |
|
263 | return | |
264 |
|
264 | |||
265 | parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}')) |
|
265 | parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}')) | |
266 | if not tmpl.startswith('}', pos): |
|
266 | if not tmpl.startswith('}', pos): | |
267 | raise error.ParseError(_("invalid token"), pos) |
|
267 | raise error.ParseError(_("invalid token"), pos) | |
268 | yield ('template', parseres, n) |
|
268 | yield ('template', parseres, n) | |
269 | pos += 1 |
|
269 | pos += 1 | |
270 |
|
270 | |||
271 | if quote: |
|
271 | if quote: | |
272 | raise error.ParseError(_("unterminated string"), start) |
|
272 | raise error.ParseError(_("unterminated string"), start) | |
273 | except error.ParseError as inst: |
|
273 | except error.ParseError as inst: | |
274 | if len(inst.args) > 1: # has location |
|
274 | if len(inst.args) > 1: # has location | |
275 | loc = inst.args[1] |
|
275 | loc = inst.args[1] | |
276 | # Offset the caret location by the number of newlines before the |
|
276 | # Offset the caret location by the number of newlines before the | |
277 | # location of the error, since we will replace one-char newlines |
|
277 | # location of the error, since we will replace one-char newlines | |
278 | # with the two-char literal r'\n'. |
|
278 | # with the two-char literal r'\n'. | |
279 | offset = tmpl[:loc].count('\n') |
|
279 | offset = tmpl[:loc].count('\n') | |
280 | tmpl = tmpl.replace('\n', br'\n') |
|
280 | tmpl = tmpl.replace('\n', br'\n') | |
281 | # We want the caret to point to the place in the template that |
|
281 | # We want the caret to point to the place in the template that | |
282 | # failed to parse, but in a hint we get an open paren at the |

282 | # failed to parse, but in a hint we get an open paren at the | |
283 | # start. Therefore, we print "loc + 1" spaces (instead of "loc") |
|
283 | # start. Therefore, we print "loc + 1" spaces (instead of "loc") | |
284 | # to line up the caret with the location of the error. |
|
284 | # to line up the caret with the location of the error. | |
285 | inst.hint = (tmpl + '\n' |
|
285 | inst.hint = (tmpl + '\n' | |
286 | + ' ' * (loc + 1 + offset) + '^ ' + _('here')) |
|
286 | + ' ' * (loc + 1 + offset) + '^ ' + _('here')) | |
287 | raise |
|
287 | raise | |
288 | yield ('end', None, pos) |
|
288 | yield ('end', None, pos) | |
289 |
|
289 | |||
290 | def _unnesttemplatelist(tree): |
|
290 | def _unnesttemplatelist(tree): | |
291 | """Expand list of templates to node tuple |
|
291 | """Expand list of templates to node tuple | |
292 |
|
292 | |||
293 | >>> def f(tree): |
|
293 | >>> def f(tree): | |
294 | ... print(pycompat.sysstr(prettyformat(_unnesttemplatelist(tree)))) |
|
294 | ... print(pycompat.sysstr(prettyformat(_unnesttemplatelist(tree)))) | |
295 | >>> f((b'template', [])) |
|
295 | >>> f((b'template', [])) | |
296 | (string '') |
|
296 | (string '') | |
297 | >>> f((b'template', [(b'string', b'foo')])) |
|
297 | >>> f((b'template', [(b'string', b'foo')])) | |
298 | (string 'foo') |
|
298 | (string 'foo') | |
299 | >>> f((b'template', [(b'string', b'foo'), (b'symbol', b'rev')])) |
|
299 | >>> f((b'template', [(b'string', b'foo'), (b'symbol', b'rev')])) | |
300 | (template |
|
300 | (template | |
301 | (string 'foo') |
|
301 | (string 'foo') | |
302 | (symbol 'rev')) |
|
302 | (symbol 'rev')) | |
303 | >>> f((b'template', [(b'symbol', b'rev')])) # template(rev) -> str |
|
303 | >>> f((b'template', [(b'symbol', b'rev')])) # template(rev) -> str | |
304 | (template |
|
304 | (template | |
305 | (symbol 'rev')) |
|
305 | (symbol 'rev')) | |
306 | >>> f((b'template', [(b'template', [(b'string', b'foo')])])) |
|
306 | >>> f((b'template', [(b'template', [(b'string', b'foo')])])) | |
307 | (string 'foo') |
|
307 | (string 'foo') | |
308 | """ |
|
308 | """ | |
309 | if not isinstance(tree, tuple): |
|
309 | if not isinstance(tree, tuple): | |
310 | return tree |
|
310 | return tree | |
311 | op = tree[0] |
|
311 | op = tree[0] | |
312 | if op != 'template': |
|
312 | if op != 'template': | |
313 | return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:]) |
|
313 | return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:]) | |
314 |
|
314 | |||
315 | assert len(tree) == 2 |
|
315 | assert len(tree) == 2 | |
316 | xs = tuple(_unnesttemplatelist(x) for x in tree[1]) |
|
316 | xs = tuple(_unnesttemplatelist(x) for x in tree[1]) | |
317 | if not xs: |
|
317 | if not xs: | |
318 | return ('string', '') # empty template "" |
|
318 | return ('string', '') # empty template "" | |
319 | elif len(xs) == 1 and xs[0][0] == 'string': |
|
319 | elif len(xs) == 1 and xs[0][0] == 'string': | |
320 | return xs[0] # fast path for string with no template fragment "x" |
|
320 | return xs[0] # fast path for string with no template fragment "x" | |
321 | else: |
|
321 | else: | |
322 | return (op,) + xs |
|
322 | return (op,) + xs | |
323 |
|
323 | |||
324 | def parse(tmpl): |
|
324 | def parse(tmpl): | |
325 | """Parse template string into tree""" |
|
325 | """Parse template string into tree""" | |
326 | parsed, pos = _parsetemplate(tmpl, 0, len(tmpl)) |
|
326 | parsed, pos = _parsetemplate(tmpl, 0, len(tmpl)) | |
327 | assert pos == len(tmpl), 'unquoted template should be consumed' |
|
327 | assert pos == len(tmpl), 'unquoted template should be consumed' | |
328 | return _unnesttemplatelist(('template', parsed)) |
|
328 | return _unnesttemplatelist(('template', parsed)) | |
329 |
|
329 | |||
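
A rough usage sketch, assuming mercurial.templater is importable: parse() handles a whole template (literal text plus {...} expressions), and prettyformat() below renders the resulting tree readably.

    from mercurial import templater

    tree = templater.parse(b'{rev}:{node|short}\n')
    print(templater.prettyformat(tree))
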
330 | def _parseexpr(expr): |
|
330 | def _parseexpr(expr): | |
331 | """Parse a template expression into tree |
|
331 | """Parse a template expression into tree | |
332 |
|
332 | |||
333 | >>> _parseexpr(b'"foo"') |
|
333 | >>> _parseexpr(b'"foo"') | |
334 | ('string', 'foo') |
|
334 | ('string', 'foo') | |
335 | >>> _parseexpr(b'foo(bar)') |
|
335 | >>> _parseexpr(b'foo(bar)') | |
336 | ('func', ('symbol', 'foo'), ('symbol', 'bar')) |
|
336 | ('func', ('symbol', 'foo'), ('symbol', 'bar')) | |
337 | >>> _parseexpr(b'foo(') |
|
337 | >>> _parseexpr(b'foo(') | |
338 | Traceback (most recent call last): |
|
338 | Traceback (most recent call last): | |
339 | ... |
|
339 | ... | |
340 | ParseError: ('not a prefix: end', 4) |
|
340 | ParseError: ('not a prefix: end', 4) | |
341 | >>> _parseexpr(b'"foo" "bar"') |
|
341 | >>> _parseexpr(b'"foo" "bar"') | |
342 | Traceback (most recent call last): |
|
342 | Traceback (most recent call last): | |
343 | ... |
|
343 | ... | |
344 | ParseError: ('invalid token', 7) |
|
344 | ParseError: ('invalid token', 7) | |
345 | """ |
|
345 | """ | |
346 | p = parser.parser(elements) |
|
346 | p = parser.parser(elements) | |
347 | tree, pos = p.parse(tokenize(expr, 0, len(expr))) |
|
347 | tree, pos = p.parse(tokenize(expr, 0, len(expr))) | |
348 | if pos != len(expr): |
|
348 | if pos != len(expr): | |
349 | raise error.ParseError(_('invalid token'), pos) |
|
349 | raise error.ParseError(_('invalid token'), pos) | |
350 | return _unnesttemplatelist(tree) |
|
350 | return _unnesttemplatelist(tree) | |
351 |
|
351 | |||
352 | def prettyformat(tree): |
|
352 | def prettyformat(tree): | |
353 | return parser.prettyformat(tree, ('integer', 'string', 'symbol')) |
|
353 | return parser.prettyformat(tree, ('integer', 'string', 'symbol')) | |
354 |
|
354 | |||
355 | def compileexp(exp, context, curmethods): |
|
355 | def compileexp(exp, context, curmethods): | |
356 | """Compile parsed template tree to (func, data) pair""" |
|
356 | """Compile parsed template tree to (func, data) pair""" | |
357 | if not exp: |
|
357 | if not exp: | |
358 | raise error.ParseError(_("missing argument")) |
|
358 | raise error.ParseError(_("missing argument")) | |
359 | t = exp[0] |
|
359 | t = exp[0] | |
360 | if t in curmethods: |
|
360 | if t in curmethods: | |
361 | return curmethods[t](exp, context) |
|
361 | return curmethods[t](exp, context) | |
362 | raise error.ParseError(_("unknown method '%s'") % t) |
|
362 | raise error.ParseError(_("unknown method '%s'") % t) | |
363 |
|
363 | |||
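
A standalone sketch (not Mercurial code) of the (func, data) convention used throughout this module: compilation produces a callable plus its precompiled data, and evaluation is simply func(context, mapping, data).

    def runliteral(context, mapping, data):
        return data

    def runupper(context, mapping, data):
        innerfunc, innerdata = data
        return innerfunc(context, mapping, innerdata).upper()

    # roughly the shape that something like "tip"|upper compiles to
    compiled = (runupper, (runliteral, b'tip'))
    func, data = compiled
    print(func(None, {}, data))   # b'TIP'
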
364 | # template evaluation |
|
364 | # template evaluation | |
365 |
|
365 | |||
366 | def getsymbol(exp): |
|
366 | def getsymbol(exp): | |
367 | if exp[0] == 'symbol': |
|
367 | if exp[0] == 'symbol': | |
368 | return exp[1] |
|
368 | return exp[1] | |
369 | raise error.ParseError(_("expected a symbol, got '%s'") % exp[0]) |
|
369 | raise error.ParseError(_("expected a symbol, got '%s'") % exp[0]) | |
370 |
|
370 | |||
371 | def getlist(x): |
|
371 | def getlist(x): | |
372 | if not x: |
|
372 | if not x: | |
373 | return [] |
|
373 | return [] | |
374 | if x[0] == 'list': |
|
374 | if x[0] == 'list': | |
375 | return getlist(x[1]) + [x[2]] |
|
375 | return getlist(x[1]) + [x[2]] | |
376 | return [x] |
|
376 | return [x] | |
377 |
|
377 | |||
378 | def gettemplate(exp, context): |
|
378 | def gettemplate(exp, context): | |
379 | """Compile given template tree or load named template from map file; |
|
379 | """Compile given template tree or load named template from map file; | |
380 | returns (func, data) pair""" |
|
380 | returns (func, data) pair""" | |
381 | if exp[0] in ('template', 'string'): |
|
381 | if exp[0] in ('template', 'string'): | |
382 | return compileexp(exp, context, methods) |
|
382 | return compileexp(exp, context, methods) | |
383 | if exp[0] == 'symbol': |
|
383 | if exp[0] == 'symbol': | |
384 | # unlike runsymbol(), here 'symbol' is always taken as template name |
|
384 | # unlike runsymbol(), here 'symbol' is always taken as template name | |
385 | # even if it exists in mapping. this allows us to override mapping |
|
385 | # even if it exists in mapping. this allows us to override mapping | |
386 | # by web templates, e.g. 'changelogtag' is redefined in map file. |
|
386 | # by web templates, e.g. 'changelogtag' is redefined in map file. | |
387 | return context._load(exp[1]) |
|
387 | return context._load(exp[1]) | |
388 | raise error.ParseError(_("expected template specifier")) |
|
388 | raise error.ParseError(_("expected template specifier")) | |
389 |
|
389 | |||
390 | def _runrecursivesymbol(context, mapping, key): |
|
390 | def _runrecursivesymbol(context, mapping, key): | |
391 | raise error.Abort(_("recursive reference '%s' in template") % key) |
|
391 | raise error.Abort(_("recursive reference '%s' in template") % key) | |
392 |
|
392 | |||
393 | def buildtemplate(exp, context): |
|
393 | def buildtemplate(exp, context): | |
394 | ctmpl = [compileexp(e, context, methods) for e in exp[1:]] |
|
394 | ctmpl = [compileexp(e, context, methods) for e in exp[1:]] | |
395 | return (templateutil.runtemplate, ctmpl) |
|
395 | return (templateutil.runtemplate, ctmpl) | |
396 |
|
396 | |||
397 | def buildfilter(exp, context): |
|
397 | def buildfilter(exp, context): | |
398 | n = getsymbol(exp[2]) |
|
398 | n = getsymbol(exp[2]) | |
399 | if n in context._filters: |
|
399 | if n in context._filters: | |
400 | filt = context._filters[n] |
|
400 | filt = context._filters[n] | |
401 | arg = compileexp(exp[1], context, methods) |
|
401 | arg = compileexp(exp[1], context, methods) | |
402 | return (templateutil.runfilter, (arg, filt)) |
|
402 | return (templateutil.runfilter, (arg, filt)) | |
403 | if n in context._funcs: |
|
403 | if n in context._funcs: | |
404 | f = context._funcs[n] |
|
404 | f = context._funcs[n] | |
405 | args = _buildfuncargs(exp[1], context, methods, n, f._argspec) |
|
405 | args = _buildfuncargs(exp[1], context, methods, n, f._argspec) | |
406 | return (f, args) |
|
406 | return (f, args) | |
407 | raise error.ParseError(_("unknown function '%s'") % n) |
|
407 | raise error.ParseError(_("unknown function '%s'") % n) | |
408 |
|
408 | |||
409 | def buildmap(exp, context): |
|
409 | def buildmap(exp, context): | |
410 | darg = compileexp(exp[1], context, methods) |
|
410 | darg = compileexp(exp[1], context, methods) | |
411 | targ = gettemplate(exp[2], context) |
|
411 | targ = gettemplate(exp[2], context) | |
412 | return (templateutil.runmap, (darg, targ)) |
|
412 | return (templateutil.runmap, (darg, targ)) | |
413 |
|
413 | |||
414 | def buildmember(exp, context): |
|
414 | def buildmember(exp, context): | |
415 | darg = compileexp(exp[1], context, methods) |
|
415 | darg = compileexp(exp[1], context, methods) | |
416 | memb = getsymbol(exp[2]) |
|
416 | memb = getsymbol(exp[2]) | |
417 | return (templateutil.runmember, (darg, memb)) |
|
417 | return (templateutil.runmember, (darg, memb)) | |
418 |
|
418 | |||
419 | def buildnegate(exp, context): |
|
419 | def buildnegate(exp, context): | |
420 | arg = compileexp(exp[1], context, exprmethods) |
|
420 | arg = compileexp(exp[1], context, exprmethods) | |
421 | return (templateutil.runnegate, arg) |
|
421 | return (templateutil.runnegate, arg) | |
422 |
|
422 | |||
423 | def buildarithmetic(exp, context, func): |
|
423 | def buildarithmetic(exp, context, func): | |
424 | left = compileexp(exp[1], context, exprmethods) |
|
424 | left = compileexp(exp[1], context, exprmethods) | |
425 | right = compileexp(exp[2], context, exprmethods) |
|
425 | right = compileexp(exp[2], context, exprmethods) | |
426 | return (templateutil.runarithmetic, (func, left, right)) |
|
426 | return (templateutil.runarithmetic, (func, left, right)) | |
427 |
|
427 | |||
428 | def buildfunc(exp, context): |
|
428 | def buildfunc(exp, context): | |
429 | n = getsymbol(exp[1]) |
|
429 | n = getsymbol(exp[1]) | |
430 | if n in context._funcs: |
|
430 | if n in context._funcs: | |
431 | f = context._funcs[n] |
|
431 | f = context._funcs[n] | |
432 | args = _buildfuncargs(exp[2], context, exprmethods, n, f._argspec) |
|
432 | args = _buildfuncargs(exp[2], context, exprmethods, n, f._argspec) | |
433 | return (f, args) |
|
433 | return (f, args) | |
434 | if n in context._filters: |
|
434 | if n in context._filters: | |
435 | args = _buildfuncargs(exp[2], context, exprmethods, n, argspec=None) |
|
435 | args = _buildfuncargs(exp[2], context, exprmethods, n, argspec=None) | |
436 | if len(args) != 1: |
|
436 | if len(args) != 1: | |
437 | raise error.ParseError(_("filter %s expects one argument") % n) |
|
437 | raise error.ParseError(_("filter %s expects one argument") % n) | |
438 | f = context._filters[n] |
|
438 | f = context._filters[n] | |
439 | return (templateutil.runfilter, (args[0], f)) |
|
439 | return (templateutil.runfilter, (args[0], f)) | |
440 | raise error.ParseError(_("unknown function '%s'") % n) |
|
440 | raise error.ParseError(_("unknown function '%s'") % n) | |
441 |
|
441 | |||
442 | def _buildfuncargs(exp, context, curmethods, funcname, argspec): |
|
442 | def _buildfuncargs(exp, context, curmethods, funcname, argspec): | |
443 | """Compile parsed tree of function arguments into list or dict of |
|
443 | """Compile parsed tree of function arguments into list or dict of | |
444 | (func, data) pairs |
|
444 | (func, data) pairs | |
445 |
|
445 | |||
446 | >>> context = engine(lambda t: (templateutil.runsymbol, t)) |
|
446 | >>> context = engine(lambda t: (templateutil.runsymbol, t)) | |
447 | >>> def fargs(expr, argspec): |
|
447 | >>> def fargs(expr, argspec): | |
448 | ... x = _parseexpr(expr) |
|
448 | ... x = _parseexpr(expr) | |
449 | ... n = getsymbol(x[1]) |
|
449 | ... n = getsymbol(x[1]) | |
450 | ... return _buildfuncargs(x[2], context, exprmethods, n, argspec) |
|
450 | ... return _buildfuncargs(x[2], context, exprmethods, n, argspec) | |
451 | >>> list(fargs(b'a(l=1, k=2)', b'k l m').keys()) |
|
451 | >>> list(fargs(b'a(l=1, k=2)', b'k l m').keys()) | |
452 | ['l', 'k'] |
|
452 | ['l', 'k'] | |
453 | >>> args = fargs(b'a(opts=1, k=2)', b'**opts') |
|
453 | >>> args = fargs(b'a(opts=1, k=2)', b'**opts') | |
454 | >>> list(args.keys()), list(args[b'opts'].keys()) |
|
454 | >>> list(args.keys()), list(args[b'opts'].keys()) | |
455 | (['opts'], ['opts', 'k']) |
|
455 | (['opts'], ['opts', 'k']) | |
456 | """ |
|
456 | """ | |
457 | def compiledict(xs): |
|
457 | def compiledict(xs): | |
458 | return util.sortdict((k, compileexp(x, context, curmethods)) |
|
458 | return util.sortdict((k, compileexp(x, context, curmethods)) | |
459 | for k, x in xs.iteritems()) |
|
459 | for k, x in xs.iteritems()) | |
460 | def compilelist(xs): |
|
460 | def compilelist(xs): | |
461 | return [compileexp(x, context, curmethods) for x in xs] |
|
461 | return [compileexp(x, context, curmethods) for x in xs] | |
462 |
|
462 | |||
463 | if not argspec: |
|
463 | if not argspec: | |
464 | # filter or function with no argspec: return list of positional args |
|
464 | # filter or function with no argspec: return list of positional args | |
465 | return compilelist(getlist(exp)) |
|
465 | return compilelist(getlist(exp)) | |
466 |
|
466 | |||
467 | # function with argspec: return dict of named args |
|
467 | # function with argspec: return dict of named args | |
468 | _poskeys, varkey, _keys, optkey = argspec = parser.splitargspec(argspec) |
|
468 | _poskeys, varkey, _keys, optkey = argspec = parser.splitargspec(argspec) | |
469 | treeargs = parser.buildargsdict(getlist(exp), funcname, argspec, |
|
469 | treeargs = parser.buildargsdict(getlist(exp), funcname, argspec, | |
470 | keyvaluenode='keyvalue', keynode='symbol') |
|
470 | keyvaluenode='keyvalue', keynode='symbol') | |
471 | compargs = util.sortdict() |
|
471 | compargs = util.sortdict() | |
472 | if varkey: |
|
472 | if varkey: | |
473 | compargs[varkey] = compilelist(treeargs.pop(varkey)) |
|
473 | compargs[varkey] = compilelist(treeargs.pop(varkey)) | |
474 | if optkey: |
|
474 | if optkey: | |
475 | compargs[optkey] = compiledict(treeargs.pop(optkey)) |
|
475 | compargs[optkey] = compiledict(treeargs.pop(optkey)) | |
476 | compargs.update(compiledict(treeargs)) |
|
476 | compargs.update(compiledict(treeargs)) | |
477 | return compargs |
|
477 | return compargs | |
478 |
|
478 | |||
479 | def buildkeyvaluepair(exp, content): |
|
479 | def buildkeyvaluepair(exp, content): | |
480 | raise error.ParseError(_("can't use a key-value pair in this context")) |
|
480 | raise error.ParseError(_("can't use a key-value pair in this context")) | |
481 |
|
481 | |||
482 | # methods to interpret function arguments or inner expressions (e.g. {_(x)}) |
|
482 | # methods to interpret function arguments or inner expressions (e.g. {_(x)}) | |
483 | exprmethods = { |
|
483 | exprmethods = { | |
484 | "integer": lambda e, c: (templateutil.runinteger, e[1]), |
|
484 | "integer": lambda e, c: (templateutil.runinteger, e[1]), | |
485 | "string": lambda e, c: (templateutil.runstring, e[1]), |
|
485 | "string": lambda e, c: (templateutil.runstring, e[1]), | |
486 | "symbol": lambda e, c: (templateutil.runsymbol, e[1]), |
|
486 | "symbol": lambda e, c: (templateutil.runsymbol, e[1]), | |
487 | "template": buildtemplate, |
|
487 | "template": buildtemplate, | |
488 | "group": lambda e, c: compileexp(e[1], c, exprmethods), |
|
488 | "group": lambda e, c: compileexp(e[1], c, exprmethods), | |
489 | ".": buildmember, |
|
489 | ".": buildmember, | |
490 | "|": buildfilter, |
|
490 | "|": buildfilter, | |
491 | "%": buildmap, |
|
491 | "%": buildmap, | |
492 | "func": buildfunc, |
|
492 | "func": buildfunc, | |
493 | "keyvalue": buildkeyvaluepair, |
|
493 | "keyvalue": buildkeyvaluepair, | |
494 | "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b), |
|
494 | "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b), | |
495 | "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b), |
|
495 | "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b), | |
496 | "negate": buildnegate, |
|
496 | "negate": buildnegate, | |
497 | "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b), |
|
497 | "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b), | |
498 | "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b), |
|
498 | "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b), | |
499 | } |
|
499 | } | |
500 |
|
500 | |||
501 | # methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"}) |
|
501 | # methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"}) | |
502 | methods = exprmethods.copy() |
|
502 | methods = exprmethods.copy() | |
503 | methods["integer"] = exprmethods["symbol"] # '{1}' as variable |
|
503 | methods["integer"] = exprmethods["symbol"] # '{1}' as variable | |
504 |
|
504 | |||
505 | class _aliasrules(parser.basealiasrules): |
|
505 | class _aliasrules(parser.basealiasrules): | |
506 | """Parsing and expansion rule set of template aliases""" |
|
506 | """Parsing and expansion rule set of template aliases""" | |
507 | _section = _('template alias') |
|
507 | _section = _('template alias') | |
508 | _parse = staticmethod(_parseexpr) |
|
508 | _parse = staticmethod(_parseexpr) | |
509 |
|
509 | |||
510 | @staticmethod |
|
510 | @staticmethod | |
511 | def _trygetfunc(tree): |
|
511 | def _trygetfunc(tree): | |
512 | """Return (name, args) if tree is func(...) or ...|filter; otherwise |
|
512 | """Return (name, args) if tree is func(...) or ...|filter; otherwise | |
513 | None""" |
|
513 | None""" | |
514 | if tree[0] == 'func' and tree[1][0] == 'symbol': |
|
514 | if tree[0] == 'func' and tree[1][0] == 'symbol': | |
515 | return tree[1][1], getlist(tree[2]) |
|
515 | return tree[1][1], getlist(tree[2]) | |
516 | if tree[0] == '|' and tree[2][0] == 'symbol': |
|
516 | if tree[0] == '|' and tree[2][0] == 'symbol': | |
517 | return tree[2][1], [tree[1]] |
|
517 | return tree[2][1], [tree[1]] | |
518 |
|
518 | |||
519 | def expandaliases(tree, aliases): |
|
519 | def expandaliases(tree, aliases): | |
520 | """Return new tree of aliases are expanded""" |
|
520 | """Return new tree of aliases are expanded""" | |
521 | aliasmap = _aliasrules.buildmap(aliases) |
|
521 | aliasmap = _aliasrules.buildmap(aliases) | |
522 | return _aliasrules.expand(aliasmap, tree) |
|
522 | return _aliasrules.expand(aliasmap, tree) | |
523 |
|
523 | |||
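
A rough usage sketch, assuming mercurial.templater is importable; the (name, definition) pair mirrors what a [templatealias] config entry provides, with the definition written as a bare expression.

    from mercurial import templater

    tree = templater.parse(b'{shortnode}\n')
    aliases = [(b'shortnode', b'node|short')]
    print(templater.prettyformat(templater.expandaliases(tree, aliases)))
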
524 | # template engine |
|
524 | # template engine | |
525 |
|
525 | |||
526 | def _flatten(thing): |
|
526 | def _flatten(thing): | |
527 | '''yield a single stream from a possibly nested set of iterators''' |
|
527 | '''yield a single stream from a possibly nested set of iterators''' | |
528 | thing = templateutil.unwraphybrid(thing) |
|
528 | thing = templateutil.unwraphybrid(thing) | |
529 | if isinstance(thing, bytes): |
|
529 | if isinstance(thing, bytes): | |
530 | yield thing |
|
530 | yield thing | |
531 | elif isinstance(thing, str): |
|
531 | elif isinstance(thing, str): | |
532 | # We can only hit this on Python 3, and it's here to guard |
|
532 | # We can only hit this on Python 3, and it's here to guard | |
533 | # against infinite recursion. |
|
533 | # against infinite recursion. | |
534 | raise error.ProgrammingError('Mercurial IO including templates is done' |
|
534 | raise error.ProgrammingError('Mercurial IO including templates is done' | |
535 | ' with bytes, not strings, got %r' % thing) |
|
535 | ' with bytes, not strings, got %r' % thing) | |
536 | elif thing is None: |
|
536 | elif thing is None: | |
537 | pass |
|
537 | pass | |
538 | elif not util.safehasattr(thing, '__iter__'): |
|
538 | elif not util.safehasattr(thing, '__iter__'): | |
539 | yield pycompat.bytestr(thing) |
|
539 | yield pycompat.bytestr(thing) | |
540 | else: |
|
540 | else: | |
541 | for i in thing: |
|
541 | for i in thing: | |
542 | i = templateutil.unwraphybrid(i) |
|
542 | i = templateutil.unwraphybrid(i) | |
543 | if isinstance(i, bytes): |
|
543 | if isinstance(i, bytes): | |
544 | yield i |
|
544 | yield i | |
545 | elif i is None: |
|
545 | elif i is None: | |
546 | pass |
|
546 | pass | |
547 | elif not util.safehasattr(i, '__iter__'): |
|
547 | elif not util.safehasattr(i, '__iter__'): | |
548 | yield pycompat.bytestr(i) |
|
548 | yield pycompat.bytestr(i) | |
549 | else: |
|
549 | else: | |
550 | for j in _flatten(i): |
|
550 | for j in _flatten(i): | |
551 | yield j |
|
551 | yield j | |
552 |
|
552 | |||
553 | def unquotestring(s): |
|
553 | def unquotestring(s): | |
554 | '''unwrap quotes if any; otherwise returns unmodified string''' |
|
554 | '''unwrap quotes if any; otherwise returns unmodified string''' | |
555 | if len(s) < 2 or s[0] not in "'\"" or s[0] != s[-1]: |
|
555 | if len(s) < 2 or s[0] not in "'\"" or s[0] != s[-1]: | |
556 | return s |
|
556 | return s | |
557 | return s[1:-1] |
|
557 | return s[1:-1] | |
558 |
|
558 | |||
559 | class engine(object): |
|
559 | class engine(object): | |
560 | '''template expansion engine. |
|
560 | '''template expansion engine. | |
561 |
|
561 | |||
562 | template expansion works like this. a map file contains key=value |
|
562 | template expansion works like this. a map file contains key=value | |
563 | pairs. if value is quoted, it is treated as string. otherwise, it |
|
563 | pairs. if value is quoted, it is treated as string. otherwise, it | |
564 | is treated as name of template file. |
|
564 | is treated as name of template file. | |
565 |
|
565 | |||
566 | templater is asked to expand a key in map. it looks up key, and |
|
566 | templater is asked to expand a key in map. it looks up key, and | |
567 | looks for strings like this: {foo}. it expands {foo} by looking up |
|
567 | looks for strings like this: {foo}. it expands {foo} by looking up | |
568 | foo in map, and substituting it. expansion is recursive: it stops |
|
568 | foo in map, and substituting it. expansion is recursive: it stops | |
569 | when there is no more {foo} to replace. |
|
569 | when there is no more {foo} to replace. | |
570 |
|
570 | |||
571 | expansion also allows formatting and filtering. |
|
571 | expansion also allows formatting and filtering. | |
572 |
|
572 | |||
573 | format uses key to expand each item in list. syntax is |
|
573 | format uses key to expand each item in list. syntax is | |
574 | {key%format}. |
|
574 | {key%format}. | |
575 |
|
575 | |||
576 | filter uses function to transform value. syntax is |
|
576 | filter uses function to transform value. syntax is | |
577 | {key|filter1|filter2|...}.''' |
|
577 | {key|filter1|filter2|...}.''' | |
578 |
|
578 | |||
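
A standalone toy version of the expansion described above (not the real engine): look the key up in a map, substitute each {name} from the mapping or from another map entry, and recurse.

    import re

    def expand(key, tmplmap, mapping):
        def repl(match):
            name = match.group(1)
            if name in mapping:
                return mapping[name]
            return expand(name, tmplmap, mapping)
        return re.sub(r'\{(\w+)\}', repl, tmplmap[key])

    tmplmap = {'changeset': 'changeset: {rev}\nsummary:   {desc}\n'}
    print(expand('changeset', tmplmap, {'rev': '0', 'desc': 'initial import'}))
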
579 | def __init__(self, loader, filters=None, defaults=None, resources=None, |
|
579 | def __init__(self, loader, filters=None, defaults=None, resources=None, | |
580 | aliases=()): |
|
580 | aliases=()): | |
581 | self._loader = loader |
|
581 | self._loader = loader | |
582 | if filters is None: |
|
582 | if filters is None: | |
583 | filters = {} |
|
583 | filters = {} | |
584 | self._filters = filters |
|
584 | self._filters = filters | |
585 | self._funcs = templatefuncs.funcs # make this a parameter if needed |
|
585 | self._funcs = templatefuncs.funcs # make this a parameter if needed | |
586 | if defaults is None: |
|
586 | if defaults is None: | |
587 | defaults = {} |
|
587 | defaults = {} | |
588 | if resources is None: |
|
588 | if resources is None: | |
589 | resources = {} |
|
589 | resources = {} | |
590 | self._defaults = defaults |
|
590 | self._defaults = defaults | |
591 | self._resources = resources |
|
591 | self._resources = resources | |
592 | self._aliasmap = _aliasrules.buildmap(aliases) |
|
592 | self._aliasmap = _aliasrules.buildmap(aliases) | |
593 | self._cache = {} # key: (func, data) |
|
593 | self._cache = {} # key: (func, data) | |
594 |
|
594 | |||
595 | def symbol(self, mapping, key): |
|
595 | def symbol(self, mapping, key): | |
596 | """Resolve symbol to value or function; None if nothing found""" |
|
596 | """Resolve symbol to value or function; None if nothing found""" | |
597 | v = None |
|
597 | v = None | |
598 | if key not in self._resources: |
|
598 | if key not in self._resources: | |
599 | v = mapping.get(key) |
|
599 | v = mapping.get(key) | |
600 | if v is None: |
|
600 | if v is None: | |
601 | v = self._defaults.get(key) |
|
601 | v = self._defaults.get(key) | |
602 | return v |
|
602 | return v | |
603 |
|
603 | |||
604 | def resource(self, mapping, key): |
|
604 | def resource(self, mapping, key): | |
605 | """Return internal data (e.g. cache) used for keyword/function |
|
605 | """Return internal data (e.g. cache) used for keyword/function | |
606 | evaluation""" |
|
606 | evaluation""" | |
607 | v = None |
|
607 | v = None | |
608 | if key in self._resources: |
|
608 | if key in self._resources: | |
609 | v = self._resources[key](self, mapping, key) |
|
609 | v = self._resources[key](self, mapping, key) | |
610 | if v is None: |
|
610 | if v is None: | |
611 | raise templateutil.ResourceUnavailable( |
|
611 | raise templateutil.ResourceUnavailable( | |
612 | _('template resource not available: %s') % key) |
|
612 | _('template resource not available: %s') % key) | |
613 | return v |
|
613 | return v | |
614 |
|
614 | |||
615 | def _load(self, t): |
|
615 | def _load(self, t): | |
616 | '''load, parse, and cache a template''' |
|
616 | '''load, parse, and cache a template''' | |
617 | if t not in self._cache: |
|
617 | if t not in self._cache: | |
618 | # put poison to cut recursion while compiling 't' |
|
618 | # put poison to cut recursion while compiling 't' | |
619 | self._cache[t] = (_runrecursivesymbol, t) |
|
619 | self._cache[t] = (_runrecursivesymbol, t) | |
620 | try: |
|
620 | try: | |
621 | x = parse(self._loader(t)) |
|
621 | x = parse(self._loader(t)) | |
622 | if self._aliasmap: |
|
622 | if self._aliasmap: | |
623 | x = _aliasrules.expand(self._aliasmap, x) |
|
623 | x = _aliasrules.expand(self._aliasmap, x) | |
624 | self._cache[t] = compileexp(x, self, methods) |
|
624 | self._cache[t] = compileexp(x, self, methods) | |
625 | except: # re-raises |
|
625 | except: # re-raises | |
626 | del self._cache[t] |
|
626 | del self._cache[t] | |
627 | raise |
|
627 | raise | |
628 | return self._cache[t] |
|
628 | return self._cache[t] | |
629 |
|
629 | |||
630 | def process(self, t, mapping): |
|
630 | def process(self, t, mapping): | |
631 | '''Perform expansion. t is name of map element to expand. |
|
631 | '''Perform expansion. t is name of map element to expand. | |
632 | mapping contains added elements for use during expansion. Returns a |

632 | mapping contains added elements for use during expansion. Returns a | |
633 | generator.''' |

633 | generator.''' | |
634 | func, data = self._load(t) |
|
634 | func, data = self._load(t) | |
635 | return _flatten(func(self, mapping, data)) |
|
635 | return _flatten(func(self, mapping, data)) | |
636 |
|
636 | |||
637 | engines = {'default': engine} |
|
637 | engines = {'default': engine} | |
638 |
|
638 | |||
639 | def stylelist(): |
|
639 | def stylelist(): | |
640 | paths = templatepaths() |
|
640 | paths = templatepaths() | |
641 | if not paths: |
|
641 | if not paths: | |
642 | return _('no templates found, try `hg debuginstall` for more info') |
|
642 | return _('no templates found, try `hg debuginstall` for more info') | |
643 | dirlist = os.listdir(paths[0]) |
|
643 | dirlist = os.listdir(paths[0]) | |
644 | stylelist = [] |
|
644 | stylelist = [] | |
645 | for file in dirlist: |
|
645 | for file in dirlist: | |
646 | split = file.split(".") |
|
646 | split = file.split(".") | |
647 | if split[-1] in ('orig', 'rej'): |
|
647 | if split[-1] in ('orig', 'rej'): | |
648 | continue |
|
648 | continue | |
649 | if split[0] == "map-cmdline": |
|
649 | if split[0] == "map-cmdline": | |
650 | stylelist.append(split[1]) |
|
650 | stylelist.append(split[1]) | |
651 | return ", ".join(sorted(stylelist)) |
|
651 | return ", ".join(sorted(stylelist)) | |
652 |
|
652 | |||
653 | def _readmapfile(mapfile): |
|
653 | def _readmapfile(mapfile): | |
654 | """Load template elements from the given map file""" |
|
654 | """Load template elements from the given map file""" | |
655 | if not os.path.exists(mapfile): |
|
655 | if not os.path.exists(mapfile): | |
656 | raise error.Abort(_("style '%s' not found") % mapfile, |
|
656 | raise error.Abort(_("style '%s' not found") % mapfile, | |
657 | hint=_("available styles: %s") % stylelist()) |
|
657 | hint=_("available styles: %s") % stylelist()) | |
658 |
|
658 | |||
659 | base = os.path.dirname(mapfile) |
|
659 | base = os.path.dirname(mapfile) | |
660 | conf = config.config(includepaths=templatepaths()) |
|
660 | conf = config.config(includepaths=templatepaths()) | |
661 | conf.read(mapfile, remap={'': 'templates'}) |
|
661 | conf.read(mapfile, remap={'': 'templates'}) | |
662 |
|
662 | |||
663 | cache = {} |
|
663 | cache = {} | |
664 | tmap = {} |
|
664 | tmap = {} | |
665 | aliases = [] |
|
665 | aliases = [] | |
666 |
|
666 | |||
667 | val = conf.get('templates', '__base__') |
|
667 | val = conf.get('templates', '__base__') | |
668 | if val and val[0] not in "'\"": |
|
668 | if val and val[0] not in "'\"": | |
669 | # treat as a pointer to a base class for this style |
|
669 | # treat as a pointer to a base class for this style | |
670 | path = util.normpath(os.path.join(base, val)) |
|
670 | path = util.normpath(os.path.join(base, val)) | |
671 |
|
671 | |||
672 | # fallback check in template paths |
|
672 | # fallback check in template paths | |
673 | if not os.path.exists(path): |
|
673 | if not os.path.exists(path): | |
674 | for p in templatepaths(): |
|
674 | for p in templatepaths(): | |
675 | p2 = util.normpath(os.path.join(p, val)) |
|
675 | p2 = util.normpath(os.path.join(p, val)) | |
676 | if os.path.isfile(p2): |
|
676 | if os.path.isfile(p2): | |
677 | path = p2 |
|
677 | path = p2 | |
678 | break |
|
678 | break | |
679 | p3 = util.normpath(os.path.join(p2, "map")) |
|
679 | p3 = util.normpath(os.path.join(p2, "map")) | |
680 | if os.path.isfile(p3): |
|
680 | if os.path.isfile(p3): | |
681 | path = p3 |
|
681 | path = p3 | |
682 | break |
|
682 | break | |
683 |
|
683 | |||
684 | cache, tmap, aliases = _readmapfile(path) |
|
684 | cache, tmap, aliases = _readmapfile(path) | |
685 |
|
685 | |||
686 | for key, val in conf['templates'].items(): |
|
686 | for key, val in conf['templates'].items(): | |
687 | if not val: |
|
687 | if not val: | |
688 | raise error.ParseError(_('missing value'), |
|
688 | raise error.ParseError(_('missing value'), | |
689 | conf.source('templates', key)) |
|
689 | conf.source('templates', key)) | |
690 | if val[0] in "'\"": |
|
690 | if val[0] in "'\"": | |
691 | if val[0] != val[-1]: |
|
691 | if val[0] != val[-1]: | |
692 | raise error.ParseError(_('unmatched quotes'), |
|
692 | raise error.ParseError(_('unmatched quotes'), | |
693 | conf.source('templates', key)) |
|
693 | conf.source('templates', key)) | |
694 | cache[key] = unquotestring(val) |
|
694 | cache[key] = unquotestring(val) | |
695 | elif key != '__base__': |
|
695 | elif key != '__base__': | |
696 | val = 'default', val |
|
696 | val = 'default', val | |
697 | if ':' in val[1]: |
|
697 | if ':' in val[1]: | |
698 | val = val[1].split(':', 1) |
|
698 | val = val[1].split(':', 1) | |
699 | tmap[key] = val[0], os.path.join(base, val[1]) |
|
699 | tmap[key] = val[0], os.path.join(base, val[1]) | |
700 | aliases.extend(conf['templatealias'].items()) |
|
700 | aliases.extend(conf['templatealias'].items()) | |
701 | return cache, tmap, aliases |
|
701 | return cache, tmap, aliases | |
702 |
|
702 | |||
703 | class templater(object): |
|
703 | class templater(object): | |
704 |
|
704 | |||
705 | def __init__(self, filters=None, defaults=None, resources=None, |
|
705 | def __init__(self, filters=None, defaults=None, resources=None, | |
706 | cache=None, aliases=(), minchunk=1024, maxchunk=65536): |
|
706 | cache=None, aliases=(), minchunk=1024, maxchunk=65536): | |
707 | """Create template engine optionally with preloaded template fragments |
|
707 | """Create template engine optionally with preloaded template fragments | |
708 |
|
708 | |||
709 | - ``filters``: a dict of functions to transform a value into another. |
|
709 | - ``filters``: a dict of functions to transform a value into another. | |
710 | - ``defaults``: a dict of symbol values/functions; may be overridden |
|
710 | - ``defaults``: a dict of symbol values/functions; may be overridden | |
711 | by a ``mapping`` dict. |
|
711 | by a ``mapping`` dict. | |
712 | - ``resources``: a dict of functions returning internal data |
|
712 | - ``resources``: a dict of functions returning internal data | |
713 | (e.g. cache), inaccessible from user template. |
|
713 | (e.g. cache), inaccessible from user template. | |
714 | - ``cache``: a dict of preloaded template fragments. |
|
714 | - ``cache``: a dict of preloaded template fragments. | |
715 | - ``aliases``: a list of alias (name, replacement) pairs. |
|
715 | - ``aliases``: a list of alias (name, replacement) pairs. | |
716 |
|
716 | |||
717 | self.cache may be updated later to register additional template |
|
717 | self.cache may be updated later to register additional template | |
718 | fragments. |
|
718 | fragments. | |
719 | """ |
|
719 | """ | |
720 | if filters is None: |
|
720 | if filters is None: | |
721 | filters = {} |
|
721 | filters = {} | |
722 | if defaults is None: |
|
722 | if defaults is None: | |
723 | defaults = {} |
|
723 | defaults = {} | |
724 | if resources is None: |
|
724 | if resources is None: | |
725 | resources = {} |
|
725 | resources = {} | |
726 | if cache is None: |
|
726 | if cache is None: | |
727 | cache = {} |
|
727 | cache = {} | |
728 | self.cache = cache.copy() |
|
728 | self.cache = cache.copy() | |
729 | self.map = {} |
|
729 | self.map = {} | |
730 | self.filters = templatefilters.filters.copy() |
|
730 | self.filters = templatefilters.filters.copy() | |
731 | self.filters.update(filters) |
|
731 | self.filters.update(filters) | |
732 | self.defaults = defaults |
|
732 | self.defaults = defaults | |
733 | self._resources = {'templ': lambda context, mapping, key: self} |
|
733 | self._resources = {'templ': lambda context, mapping, key: self} | |
734 | self._resources.update(resources) |
|
734 | self._resources.update(resources) | |
735 | self._aliases = aliases |
|
735 | self._aliases = aliases | |
736 | self.minchunk, self.maxchunk = minchunk, maxchunk |
|
736 | self.minchunk, self.maxchunk = minchunk, maxchunk | |
737 | self.ecache = {} |
|
737 | self.ecache = {} | |
738 |
|
738 | |||
739 | @classmethod |
|
739 | @classmethod | |
740 | def frommapfile(cls, mapfile, filters=None, defaults=None, resources=None, |
|
740 | def frommapfile(cls, mapfile, filters=None, defaults=None, resources=None, | |
741 | cache=None, minchunk=1024, maxchunk=65536): |
|
741 | cache=None, minchunk=1024, maxchunk=65536): | |
742 | """Create templater from the specified map file""" |
|
742 | """Create templater from the specified map file""" | |
743 | t = cls(filters, defaults, resources, cache, [], minchunk, maxchunk) |
|
743 | t = cls(filters, defaults, resources, cache, [], minchunk, maxchunk) | |
744 | cache, tmap, aliases = _readmapfile(mapfile) |
|
744 | cache, tmap, aliases = _readmapfile(mapfile) | |
745 | t.cache.update(cache) |
|
745 | t.cache.update(cache) | |
746 | t.map = tmap |
|
746 | t.map = tmap | |
747 | t._aliases = aliases |
|
747 | t._aliases = aliases | |
748 | return t |
|
748 | return t | |
749 |
|
749 | |||
750 | def __contains__(self, key): |
|
750 | def __contains__(self, key): | |
751 | return key in self.cache or key in self.map |
|
751 | return key in self.cache or key in self.map | |
752 |
|
752 | |||
753 | def load(self, t): |
|
753 | def load(self, t): | |
754 | '''Get the template for the given template name. Use a local cache.''' |
|
754 | '''Get the template for the given template name. Use a local cache.''' | |
755 | if t not in self.cache: |
|
755 | if t not in self.cache: | |
756 | try: |
|
756 | try: | |
757 | self.cache[t] = util.readfile(self.map[t][1]) |
|
757 | self.cache[t] = util.readfile(self.map[t][1]) | |
758 | except KeyError as inst: |
|
758 | except KeyError as inst: | |
759 | raise templateutil.TemplateNotFound( |
|
759 | raise templateutil.TemplateNotFound( | |
760 | _('"%s" not in template map') % inst.args[0]) |
|
760 | _('"%s" not in template map') % inst.args[0]) | |
761 | except IOError as inst: |
|
761 | except IOError as inst: | |
762 | reason = (_('template file %s: %s') |
|
762 | reason = (_('template file %s: %s') | |
763 | % (self.map[t][1], util.forcebytestr(inst.args[1]))) |
|
763 | % (self.map[t][1], util.forcebytestr(inst.args[1]))) | |
764 | raise IOError(inst.args[0], encoding.strfromlocal(reason)) |
|
764 | raise IOError(inst.args[0], encoding.strfromlocal(reason)) | |
765 | return self.cache[t] |
|
765 | return self.cache[t] | |
766 |
|
766 | |||
767 | def renderdefault(self, mapping): |
|
767 | def renderdefault(self, mapping): | |
768 | """Render the default unnamed template and return result as string""" |
|
768 | """Render the default unnamed template and return result as string""" | |
769 | return self.render('', mapping) |
|
769 | return self.render('', mapping) | |
770 |
|
770 | |||
771 | def render(self, t, mapping): |
|
771 | def render(self, t, mapping): | |
772 | """Render the specified named template and return result as string""" |
|
772 | """Render the specified named template and return result as string""" | |
773 | mapping = pycompat.strkwargs(mapping) |
|
773 | return templateutil.stringify(self.generate(t, mapping)) | |
774 | return templateutil.stringify(self(t, **mapping)) |
|
|||
775 |
|
774 | |||
776 | def __call__(self, t, **mapping): |
|
775 | def generate(self, t, mapping): | |
777 | mapping = pycompat.byteskwargs(mapping) |
|
776 | """Return a generator that renders the specified named template and | |
|
777 | yields chunks""" | |||
778 | ttype = t in self.map and self.map[t][0] or 'default' |
|
778 | ttype = t in self.map and self.map[t][0] or 'default' | |
779 | if ttype not in self.ecache: |
|
779 | if ttype not in self.ecache: | |
780 | try: |
|
780 | try: | |
781 | ecls = engines[ttype] |
|
781 | ecls = engines[ttype] | |
782 | except KeyError: |
|
782 | except KeyError: | |
783 | raise error.Abort(_('invalid template engine: %s') % ttype) |
|
783 | raise error.Abort(_('invalid template engine: %s') % ttype) | |
784 | self.ecache[ttype] = ecls(self.load, self.filters, self.defaults, |
|
784 | self.ecache[ttype] = ecls(self.load, self.filters, self.defaults, | |
785 | self._resources, self._aliases) |
|
785 | self._resources, self._aliases) | |
786 | proc = self.ecache[ttype] |
|
786 | proc = self.ecache[ttype] | |
787 |
|
787 | |||
788 | stream = proc.process(t, mapping) |
|
788 | stream = proc.process(t, mapping) | |
789 | if self.minchunk: |
|
789 | if self.minchunk: | |
790 | stream = util.increasingchunks(stream, min=self.minchunk, |
|
790 | stream = util.increasingchunks(stream, min=self.minchunk, | |
791 | max=self.maxchunk) |
|
791 | max=self.maxchunk) | |
792 | return stream |
|
792 | return stream | |
793 |
|
793 | |||
794 | def templatepaths(): |
|
794 | def templatepaths(): | |
795 | '''return locations used for template files.''' |
|
795 | '''return locations used for template files.''' | |
796 | pathsrel = ['templates'] |
|
796 | pathsrel = ['templates'] | |
797 | paths = [os.path.normpath(os.path.join(util.datapath, f)) |
|
797 | paths = [os.path.normpath(os.path.join(util.datapath, f)) | |
798 | for f in pathsrel] |
|
798 | for f in pathsrel] | |
799 | return [p for p in paths if os.path.isdir(p)] |
|
799 | return [p for p in paths if os.path.isdir(p)] | |
800 |
|
800 | |||
801 | def templatepath(name): |
|
801 | def templatepath(name): | |
802 | '''return location of template file. returns None if not found.''' |
|
802 | '''return location of template file. returns None if not found.''' | |
803 | for p in templatepaths(): |
|
803 | for p in templatepaths(): | |
804 | f = os.path.join(p, name) |
|
804 | f = os.path.join(p, name) | |
805 | if os.path.exists(f): |
|
805 | if os.path.exists(f): | |
806 | return f |
|
806 | return f | |
807 | return None |
|
807 | return None | |
808 |
|
808 | |||
809 | def stylemap(styles, paths=None): |
|
809 | def stylemap(styles, paths=None): | |
810 | """Return path to mapfile for a given style. |
|
810 | """Return path to mapfile for a given style. | |
811 |
|
811 | |||
812 | Searches mapfile in the following locations: |
|
812 | Searches mapfile in the following locations: | |
813 | 1. templatepath/style/map |
|
813 | 1. templatepath/style/map | |
814 | 2. templatepath/map-style |
|
814 | 2. templatepath/map-style | |
815 | 3. templatepath/map |
|
815 | 3. templatepath/map | |
816 | """ |
|
816 | """ | |
817 |
|
817 | |||
818 | if paths is None: |
|
818 | if paths is None: | |
819 | paths = templatepaths() |
|
819 | paths = templatepaths() | |
820 | elif isinstance(paths, bytes): |
|
820 | elif isinstance(paths, bytes): | |
821 | paths = [paths] |
|
821 | paths = [paths] | |
822 |
|
822 | |||
823 | if isinstance(styles, bytes): |
|
823 | if isinstance(styles, bytes): | |
824 | styles = [styles] |
|
824 | styles = [styles] | |
825 |
|
825 | |||
826 | for style in styles: |
|
826 | for style in styles: | |
827 | # only plain name is allowed to honor template paths |
|
827 | # only plain name is allowed to honor template paths | |
828 | if (not style |
|
828 | if (not style | |
829 | or style in (pycompat.oscurdir, pycompat.ospardir) |
|
829 | or style in (pycompat.oscurdir, pycompat.ospardir) | |
830 | or pycompat.ossep in style |
|
830 | or pycompat.ossep in style | |
831 | or pycompat.osaltsep and pycompat.osaltsep in style): |
|
831 | or pycompat.osaltsep and pycompat.osaltsep in style): | |
832 | continue |
|
832 | continue | |
833 | locations = [os.path.join(style, 'map'), 'map-' + style] |
|
833 | locations = [os.path.join(style, 'map'), 'map-' + style] | |
834 | locations.append('map') |
|
834 | locations.append('map') | |
835 |
|
835 | |||
836 | for path in paths: |
|
836 | for path in paths: | |
837 | for location in locations: |
|
837 | for location in locations: | |
838 | mapfile = os.path.join(path, location) |
|
838 | mapfile = os.path.join(path, location) | |
839 | if os.path.isfile(mapfile): |
|
839 | if os.path.isfile(mapfile): | |
840 | return style, mapfile |
|
840 | return style, mapfile | |
841 |
|
841 | |||
842 | raise RuntimeError("No hgweb templates found in %r" % paths) |
|
842 | raise RuntimeError("No hgweb templates found in %r" % paths) |
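For readers following the templater.py hunk above: the change splits the old callable interface into render(), which returns the fully expanded template as a single byte string, and generate(), which returns a generator of byte chunks. The snippet below is a minimal usage sketch, not part of the change itself; the preloaded 'greeting' fragment and the 'user' keyword are invented for illustration.

    from mercurial import templater

    # Preload a tiny template fragment instead of reading a map file.
    t = templater.templater(cache={b'greeting': b'hello, {user}\n'})
    mapping = {b'user': b'alice'}

    # render() expands the named template and returns one bytes value.
    text = t.render(b'greeting', mapping)

    # generate() yields chunks (grouped by minchunk/maxchunk), which is what
    # hgweb uses to stream a response without buffering it whole.
    chunks = list(t.generate(b'greeting', mapping))
    assert b''.join(chunks) == text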
@@ -1,449 +1,448 b'' | |||||
1 | # templateutil.py - utility for template evaluation |
|
1 | # templateutil.py - utility for template evaluation | |
2 | # |
|
2 | # | |
3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import types |
|
10 | import types | |
11 |
|
11 | |||
12 | from .i18n import _ |
|
12 | from .i18n import _ | |
13 | from . import ( |
|
13 | from . import ( | |
14 | error, |
|
14 | error, | |
15 | pycompat, |
|
15 | pycompat, | |
16 | util, |
|
16 | util, | |
17 | ) |
|
17 | ) | |
18 |
|
18 | |||
19 | class ResourceUnavailable(error.Abort): |
|
19 | class ResourceUnavailable(error.Abort): | |
20 | pass |
|
20 | pass | |
21 |
|
21 | |||
22 | class TemplateNotFound(error.Abort): |
|
22 | class TemplateNotFound(error.Abort): | |
23 | pass |
|
23 | pass | |
24 |
|
24 | |||
25 | class hybrid(object): |
|
25 | class hybrid(object): | |
26 | """Wrapper for list or dict to support legacy template |
|
26 | """Wrapper for list or dict to support legacy template | |
27 |
|
27 | |||
28 | This class allows us to handle both: |
|
28 | This class allows us to handle both: | |
29 | - "{files}" (legacy command-line-specific list hack) and |
|
29 | - "{files}" (legacy command-line-specific list hack) and | |
30 | - "{files % '{file}\n'}" (hgweb-style with inlining and function support) |
|
30 | - "{files % '{file}\n'}" (hgweb-style with inlining and function support) | |
31 | and to access raw values: |
|
31 | and to access raw values: | |
32 | - "{ifcontains(file, files, ...)}", "{ifcontains(key, extras, ...)}" |
|
32 | - "{ifcontains(file, files, ...)}", "{ifcontains(key, extras, ...)}" | |
33 | - "{get(extras, key)}" |
|
33 | - "{get(extras, key)}" | |
34 | - "{files|json}" |
|
34 | - "{files|json}" | |
35 | """ |
|
35 | """ | |
36 |
|
36 | |||
37 | def __init__(self, gen, values, makemap, joinfmt, keytype=None): |
|
37 | def __init__(self, gen, values, makemap, joinfmt, keytype=None): | |
38 | if gen is not None: |
|
38 | if gen is not None: | |
39 | self.gen = gen # generator or function returning generator |
|
39 | self.gen = gen # generator or function returning generator | |
40 | self._values = values |
|
40 | self._values = values | |
41 | self._makemap = makemap |
|
41 | self._makemap = makemap | |
42 | self.joinfmt = joinfmt |
|
42 | self.joinfmt = joinfmt | |
43 | self.keytype = keytype # hint for 'x in y' where type(x) is unresolved |
|
43 | self.keytype = keytype # hint for 'x in y' where type(x) is unresolved | |
44 | def gen(self): |
|
44 | def gen(self): | |
45 | """Default generator to stringify this as {join(self, ' ')}""" |
|
45 | """Default generator to stringify this as {join(self, ' ')}""" | |
46 | for i, x in enumerate(self._values): |
|
46 | for i, x in enumerate(self._values): | |
47 | if i > 0: |
|
47 | if i > 0: | |
48 | yield ' ' |
|
48 | yield ' ' | |
49 | yield self.joinfmt(x) |
|
49 | yield self.joinfmt(x) | |
50 | def itermaps(self): |
|
50 | def itermaps(self): | |
51 | makemap = self._makemap |
|
51 | makemap = self._makemap | |
52 | for x in self._values: |
|
52 | for x in self._values: | |
53 | yield makemap(x) |
|
53 | yield makemap(x) | |
54 | def __contains__(self, x): |
|
54 | def __contains__(self, x): | |
55 | return x in self._values |
|
55 | return x in self._values | |
56 | def __getitem__(self, key): |
|
56 | def __getitem__(self, key): | |
57 | return self._values[key] |
|
57 | return self._values[key] | |
58 | def __len__(self): |
|
58 | def __len__(self): | |
59 | return len(self._values) |
|
59 | return len(self._values) | |
60 | def __iter__(self): |
|
60 | def __iter__(self): | |
61 | return iter(self._values) |
|
61 | return iter(self._values) | |
62 | def __getattr__(self, name): |
|
62 | def __getattr__(self, name): | |
63 | if name not in (r'get', r'items', r'iteritems', r'iterkeys', |
|
63 | if name not in (r'get', r'items', r'iteritems', r'iterkeys', | |
64 | r'itervalues', r'keys', r'values'): |
|
64 | r'itervalues', r'keys', r'values'): | |
65 | raise AttributeError(name) |
|
65 | raise AttributeError(name) | |
66 | return getattr(self._values, name) |
|
66 | return getattr(self._values, name) | |
67 |
|
67 | |||
68 | class mappable(object): |
|
68 | class mappable(object): | |
69 | """Wrapper for non-list/dict object to support map operation |
|
69 | """Wrapper for non-list/dict object to support map operation | |
70 |
|
70 | |||
71 | This class allows us to handle both: |
|
71 | This class allows us to handle both: | |
72 | - "{manifest}" |
|
72 | - "{manifest}" | |
73 | - "{manifest % '{rev}:{node}'}" |
|
73 | - "{manifest % '{rev}:{node}'}" | |
74 | - "{manifest.rev}" |
|
74 | - "{manifest.rev}" | |
75 |
|
75 | |||
76 | Unlike a hybrid, this does not simulate the behavior of the underlying |
|
76 | Unlike a hybrid, this does not simulate the behavior of the underlying | |
77 | value. Use unwrapvalue() or unwraphybrid() to obtain the inner object. |
|
77 | value. Use unwrapvalue() or unwraphybrid() to obtain the inner object. | |
78 | """ |
|
78 | """ | |
79 |
|
79 | |||
80 | def __init__(self, gen, key, value, makemap): |
|
80 | def __init__(self, gen, key, value, makemap): | |
81 | if gen is not None: |
|
81 | if gen is not None: | |
82 | self.gen = gen # generator or function returning generator |
|
82 | self.gen = gen # generator or function returning generator | |
83 | self._key = key |
|
83 | self._key = key | |
84 | self._value = value # may be generator of strings |
|
84 | self._value = value # may be generator of strings | |
85 | self._makemap = makemap |
|
85 | self._makemap = makemap | |
86 |
|
86 | |||
87 | def gen(self): |
|
87 | def gen(self): | |
88 | yield pycompat.bytestr(self._value) |
|
88 | yield pycompat.bytestr(self._value) | |
89 |
|
89 | |||
90 | def tomap(self): |
|
90 | def tomap(self): | |
91 | return self._makemap(self._key) |
|
91 | return self._makemap(self._key) | |
92 |
|
92 | |||
93 | def itermaps(self): |
|
93 | def itermaps(self): | |
94 | yield self.tomap() |
|
94 | yield self.tomap() | |
95 |
|
95 | |||
96 | def hybriddict(data, key='key', value='value', fmt=None, gen=None): |
|
96 | def hybriddict(data, key='key', value='value', fmt=None, gen=None): | |
97 | """Wrap data to support both dict-like and string-like operations""" |
|
97 | """Wrap data to support both dict-like and string-like operations""" | |
98 | prefmt = pycompat.identity |
|
98 | prefmt = pycompat.identity | |
99 | if fmt is None: |
|
99 | if fmt is None: | |
100 | fmt = '%s=%s' |
|
100 | fmt = '%s=%s' | |
101 | prefmt = pycompat.bytestr |
|
101 | prefmt = pycompat.bytestr | |
102 | return hybrid(gen, data, lambda k: {key: k, value: data[k]}, |
|
102 | return hybrid(gen, data, lambda k: {key: k, value: data[k]}, | |
103 | lambda k: fmt % (prefmt(k), prefmt(data[k]))) |
|
103 | lambda k: fmt % (prefmt(k), prefmt(data[k]))) | |
104 |
|
104 | |||
105 | def hybridlist(data, name, fmt=None, gen=None): |
|
105 | def hybridlist(data, name, fmt=None, gen=None): | |
106 | """Wrap data to support both list-like and string-like operations""" |
|
106 | """Wrap data to support both list-like and string-like operations""" | |
107 | prefmt = pycompat.identity |
|
107 | prefmt = pycompat.identity | |
108 | if fmt is None: |
|
108 | if fmt is None: | |
109 | fmt = '%s' |
|
109 | fmt = '%s' | |
110 | prefmt = pycompat.bytestr |
|
110 | prefmt = pycompat.bytestr | |
111 | return hybrid(gen, data, lambda x: {name: x}, lambda x: fmt % prefmt(x)) |
|
111 | return hybrid(gen, data, lambda x: {name: x}, lambda x: fmt % prefmt(x)) | |
112 |
|
112 | |||
113 | def unwraphybrid(thing): |
|
113 | def unwraphybrid(thing): | |
114 | """Return an object which can be stringified possibly by using a legacy |
|
114 | """Return an object which can be stringified possibly by using a legacy | |
115 | template""" |
|
115 | template""" | |
116 | gen = getattr(thing, 'gen', None) |
|
116 | gen = getattr(thing, 'gen', None) | |
117 | if gen is None: |
|
117 | if gen is None: | |
118 | return thing |
|
118 | return thing | |
119 | if callable(gen): |
|
119 | if callable(gen): | |
120 | return gen() |
|
120 | return gen() | |
121 | return gen |
|
121 | return gen | |
122 |
|
122 | |||
123 | def unwrapvalue(thing): |
|
123 | def unwrapvalue(thing): | |
124 | """Move the inner value object out of the wrapper""" |
|
124 | """Move the inner value object out of the wrapper""" | |
125 | if not util.safehasattr(thing, '_value'): |
|
125 | if not util.safehasattr(thing, '_value'): | |
126 | return thing |
|
126 | return thing | |
127 | return thing._value |
|
127 | return thing._value | |
128 |
|
128 | |||
129 | def wraphybridvalue(container, key, value): |
|
129 | def wraphybridvalue(container, key, value): | |
130 | """Wrap an element of hybrid container to be mappable |
|
130 | """Wrap an element of hybrid container to be mappable | |
131 |
|
131 | |||
132 | The key is passed to the makemap function of the given container, which |
|
132 | The key is passed to the makemap function of the given container, which | |
133 | should be an item generated by iter(container). |
|
133 | should be an item generated by iter(container). | |
134 | """ |
|
134 | """ | |
135 | makemap = getattr(container, '_makemap', None) |
|
135 | makemap = getattr(container, '_makemap', None) | |
136 | if makemap is None: |
|
136 | if makemap is None: | |
137 | return value |
|
137 | return value | |
138 | if util.safehasattr(value, '_makemap'): |
|
138 | if util.safehasattr(value, '_makemap'): | |
139 | # a nested hybrid list/dict, which has its own way of map operation |
|
139 | # a nested hybrid list/dict, which has its own way of map operation | |
140 | return value |
|
140 | return value | |
141 | return mappable(None, key, value, makemap) |
|
141 | return mappable(None, key, value, makemap) | |
142 |
|
142 | |||
143 | def compatdict(context, mapping, name, data, key='key', value='value', |
|
143 | def compatdict(context, mapping, name, data, key='key', value='value', | |
144 | fmt=None, plural=None, separator=' '): |
|
144 | fmt=None, plural=None, separator=' '): | |
145 | """Wrap data like hybriddict(), but also supports old-style list template |
|
145 | """Wrap data like hybriddict(), but also supports old-style list template | |
146 |
|
146 | |||
147 | This exists for backward compatibility with the old-style template. Use |
|
147 | This exists for backward compatibility with the old-style template. Use | |
148 | hybriddict() for new template keywords. |
|
148 | hybriddict() for new template keywords. | |
149 | """ |
|
149 | """ | |
150 | c = [{key: k, value: v} for k, v in data.iteritems()] |
|
150 | c = [{key: k, value: v} for k, v in data.iteritems()] | |
151 | t = context.resource(mapping, 'templ') |
|
151 | t = context.resource(mapping, 'templ') | |
152 | f = _showlist(name, c, t, mapping, plural, separator) |
|
152 | f = _showlist(name, c, t, mapping, plural, separator) | |
153 | return hybriddict(data, key=key, value=value, fmt=fmt, gen=f) |
|
153 | return hybriddict(data, key=key, value=value, fmt=fmt, gen=f) | |
154 |
|
154 | |||
155 | def compatlist(context, mapping, name, data, element=None, fmt=None, |
|
155 | def compatlist(context, mapping, name, data, element=None, fmt=None, | |
156 | plural=None, separator=' '): |
|
156 | plural=None, separator=' '): | |
157 | """Wrap data like hybridlist(), but also supports old-style list template |
|
157 | """Wrap data like hybridlist(), but also supports old-style list template | |
158 |
|
158 | |||
159 | This exists for backward compatibility with the old-style template. Use |
|
159 | This exists for backward compatibility with the old-style template. Use | |
160 | hybridlist() for new template keywords. |
|
160 | hybridlist() for new template keywords. | |
161 | """ |
|
161 | """ | |
162 | t = context.resource(mapping, 'templ') |
|
162 | t = context.resource(mapping, 'templ') | |
163 | f = _showlist(name, data, t, mapping, plural, separator) |
|
163 | f = _showlist(name, data, t, mapping, plural, separator) | |
164 | return hybridlist(data, name=element or name, fmt=fmt, gen=f) |
|
164 | return hybridlist(data, name=element or name, fmt=fmt, gen=f) | |
165 |
|
165 | |||
166 | def _showlist(name, values, templ, mapping, plural=None, separator=' '): |
|
166 | def _showlist(name, values, templ, mapping, plural=None, separator=' '): | |
167 | '''expand set of values. |
|
167 | '''expand set of values. | |
168 | name is name of key in template map. |
|
168 | name is name of key in template map. | |
169 | values is list of strings or dicts. |
|
169 | values is list of strings or dicts. | |
170 | plural is plural of name, if not simply name + 's'. |
|
170 | plural is plural of name, if not simply name + 's'. | |
171 | separator is used to join values as a string |
|
171 | separator is used to join values as a string | |
172 |
|
172 | |||
173 | expansion works like this, given name 'foo'. |
|
173 | expansion works like this, given name 'foo'. | |
174 |
|
174 | |||
175 | if values is empty, expand 'no_foos'. |
|
175 | if values is empty, expand 'no_foos'. | |
176 |
|
176 | |||
177 | if 'foo' not in template map, return values as a string, |
|
177 | if 'foo' not in template map, return values as a string, | |
178 | joined by 'separator'. |
|
178 | joined by 'separator'. | |
179 |
|
179 | |||
180 | expand 'start_foos'. |
|
180 | expand 'start_foos'. | |
181 |
|
181 | |||
182 | for each value, expand 'foo'. if 'last_foo' in template |
|
182 | for each value, expand 'foo'. if 'last_foo' in template | |
183 | map, expand it instead of 'foo' for last key. |
|
183 | map, expand it instead of 'foo' for last key. | |
184 |
|
184 | |||
185 | expand 'end_foos'. |
|
185 | expand 'end_foos'. | |
186 | ''' |
|
186 | ''' | |
187 | strmapping = pycompat.strkwargs(mapping) |
|
|||
188 | if not plural: |
|
187 | if not plural: | |
189 | plural = name + 's' |
|
188 | plural = name + 's' | |
190 | if not values: |
|
189 | if not values: | |
191 | noname = 'no_' + plural |
|
190 | noname = 'no_' + plural | |
192 | if noname in templ: |
|
191 | if noname in templ: | |
193 | yield templ(noname, **strmapping) |
|
192 | yield templ.generate(noname, mapping) | |
194 | return |
|
193 | return | |
195 | if name not in templ: |
|
194 | if name not in templ: | |
196 | if isinstance(values[0], bytes): |
|
195 | if isinstance(values[0], bytes): | |
197 | yield separator.join(values) |
|
196 | yield separator.join(values) | |
198 | else: |
|
197 | else: | |
199 | for v in values: |
|
198 | for v in values: | |
200 | r = dict(v) |
|
199 | r = dict(v) | |
201 | r.update(mapping) |
|
200 | r.update(mapping) | |
202 | yield r |
|
201 | yield r | |
203 | return |
|
202 | return | |
204 | startname = 'start_' + plural |
|
203 | startname = 'start_' + plural | |
205 | if startname in templ: |
|
204 | if startname in templ: | |
206 | yield templ(startname, **strmapping) |
|
205 | yield templ.generate(startname, mapping) | |
207 | vmapping = mapping.copy() |
|
206 | vmapping = mapping.copy() | |
208 | def one(v, tag=name): |
|
207 | def one(v, tag=name): | |
209 | try: |
|
208 | try: | |
210 | vmapping.update(v) |
|
209 | vmapping.update(v) | |
211 | # Python 2 raises ValueError if the type of v is wrong. Python |
|
210 | # Python 2 raises ValueError if the type of v is wrong. Python | |
212 | # 3 raises TypeError. |
|
211 | # 3 raises TypeError. | |
213 | except (AttributeError, TypeError, ValueError): |
|
212 | except (AttributeError, TypeError, ValueError): | |
214 | try: |
|
213 | try: | |
215 | # Python 2 raises ValueError trying to destructure an e.g. |
|
214 | # Python 2 raises ValueError trying to destructure an e.g. | |
216 | # bytes. Python 3 raises TypeError. |
|
215 | # bytes. Python 3 raises TypeError. | |
217 | for a, b in v: |
|
216 | for a, b in v: | |
218 | vmapping[a] = b |
|
217 | vmapping[a] = b | |
219 | except (TypeError, ValueError): |
|
218 | except (TypeError, ValueError): | |
220 | vmapping[name] = v |
|
219 | vmapping[name] = v | |
221 | return templ(tag, **pycompat.strkwargs(vmapping)) |
|
220 | return templ.generate(tag, vmapping) | |
222 | lastname = 'last_' + name |
|
221 | lastname = 'last_' + name | |
223 | if lastname in templ: |
|
222 | if lastname in templ: | |
224 | last = values.pop() |
|
223 | last = values.pop() | |
225 | else: |
|
224 | else: | |
226 | last = None |
|
225 | last = None | |
227 | for v in values: |
|
226 | for v in values: | |
228 | yield one(v) |
|
227 | yield one(v) | |
229 | if last is not None: |
|
228 | if last is not None: | |
230 | yield one(last, tag=lastname) |
|
229 | yield one(last, tag=lastname) | |
231 | endname = 'end_' + plural |
|
230 | endname = 'end_' + plural | |
232 | if endname in templ: |
|
231 | if endname in templ: | |
233 | yield templ(endname, **strmapping) |
|
232 | yield templ.generate(endname, mapping) | |
234 |
|
233 | |||
235 | def stringify(thing): |
|
234 | def stringify(thing): | |
236 | """Turn values into bytes by converting into text and concatenating them""" |
|
235 | """Turn values into bytes by converting into text and concatenating them""" | |
237 | thing = unwraphybrid(thing) |
|
236 | thing = unwraphybrid(thing) | |
238 | if util.safehasattr(thing, '__iter__') and not isinstance(thing, bytes): |
|
237 | if util.safehasattr(thing, '__iter__') and not isinstance(thing, bytes): | |
239 | if isinstance(thing, str): |
|
238 | if isinstance(thing, str): | |
240 | # This is only reachable on Python 3 (otherwise |
|
239 | # This is only reachable on Python 3 (otherwise | |
241 | # isinstance(thing, bytes) would have been true), and is |
|
240 | # isinstance(thing, bytes) would have been true), and is | |
242 | # here to prevent infinite recursion bugs on Python 3. |
|
241 | # here to prevent infinite recursion bugs on Python 3. | |
243 | raise error.ProgrammingError( |
|
242 | raise error.ProgrammingError( | |
244 | 'stringify got unexpected unicode string: %r' % thing) |
|
243 | 'stringify got unexpected unicode string: %r' % thing) | |
245 | return "".join([stringify(t) for t in thing if t is not None]) |
|
244 | return "".join([stringify(t) for t in thing if t is not None]) | |
246 | if thing is None: |
|
245 | if thing is None: | |
247 | return "" |
|
246 | return "" | |
248 | return pycompat.bytestr(thing) |
|
247 | return pycompat.bytestr(thing) | |
249 |
|
248 | |||
250 | def findsymbolicname(arg): |
|
249 | def findsymbolicname(arg): | |
251 | """Find symbolic name for the given compiled expression; returns None |
|
250 | """Find symbolic name for the given compiled expression; returns None | |
252 | if nothing found reliably""" |
|
251 | if nothing found reliably""" | |
253 | while True: |
|
252 | while True: | |
254 | func, data = arg |
|
253 | func, data = arg | |
255 | if func is runsymbol: |
|
254 | if func is runsymbol: | |
256 | return data |
|
255 | return data | |
257 | elif func is runfilter: |
|
256 | elif func is runfilter: | |
258 | arg = data[0] |
|
257 | arg = data[0] | |
259 | else: |
|
258 | else: | |
260 | return None |
|
259 | return None | |
261 |
|
260 | |||
262 | def evalrawexp(context, mapping, arg): |
|
261 | def evalrawexp(context, mapping, arg): | |
263 | """Evaluate given argument as a bare template object which may require |
|
262 | """Evaluate given argument as a bare template object which may require | |
264 | further processing (such as folding generator of strings)""" |
|
263 | further processing (such as folding generator of strings)""" | |
265 | func, data = arg |
|
264 | func, data = arg | |
266 | return func(context, mapping, data) |
|
265 | return func(context, mapping, data) | |
267 |
|
266 | |||
268 | def evalfuncarg(context, mapping, arg): |
|
267 | def evalfuncarg(context, mapping, arg): | |
269 | """Evaluate given argument as value type""" |
|
268 | """Evaluate given argument as value type""" | |
270 | thing = evalrawexp(context, mapping, arg) |
|
269 | thing = evalrawexp(context, mapping, arg) | |
271 | thing = unwrapvalue(thing) |
|
270 | thing = unwrapvalue(thing) | |
272 | # evalrawexp() may return string, generator of strings or arbitrary object |
|
271 | # evalrawexp() may return string, generator of strings or arbitrary object | |
273 | # such as date tuple, but filter does not want generator. |
|
272 | # such as date tuple, but filter does not want generator. | |
274 | if isinstance(thing, types.GeneratorType): |
|
273 | if isinstance(thing, types.GeneratorType): | |
275 | thing = stringify(thing) |
|
274 | thing = stringify(thing) | |
276 | return thing |
|
275 | return thing | |
277 |
|
276 | |||
278 | def evalboolean(context, mapping, arg): |
|
277 | def evalboolean(context, mapping, arg): | |
279 | """Evaluate given argument as boolean, but also takes boolean literals""" |
|
278 | """Evaluate given argument as boolean, but also takes boolean literals""" | |
280 | func, data = arg |
|
279 | func, data = arg | |
281 | if func is runsymbol: |
|
280 | if func is runsymbol: | |
282 | thing = func(context, mapping, data, default=None) |
|
281 | thing = func(context, mapping, data, default=None) | |
283 | if thing is None: |
|
282 | if thing is None: | |
284 | # not a template keyword, takes as a boolean literal |
|
283 | # not a template keyword, takes as a boolean literal | |
285 | thing = util.parsebool(data) |
|
284 | thing = util.parsebool(data) | |
286 | else: |
|
285 | else: | |
287 | thing = func(context, mapping, data) |
|
286 | thing = func(context, mapping, data) | |
288 | thing = unwrapvalue(thing) |
|
287 | thing = unwrapvalue(thing) | |
289 | if isinstance(thing, bool): |
|
288 | if isinstance(thing, bool): | |
290 | return thing |
|
289 | return thing | |
291 | # other objects are evaluated as strings, which means 0 is True, but |
|
290 | # other objects are evaluated as strings, which means 0 is True, but | |
292 | # empty dict/list should be False as they are expected to be '' |
|
291 | # empty dict/list should be False as they are expected to be '' | |
293 | return bool(stringify(thing)) |
|
292 | return bool(stringify(thing)) | |
294 |
|
293 | |||
295 | def evalinteger(context, mapping, arg, err=None): |
|
294 | def evalinteger(context, mapping, arg, err=None): | |
296 | v = evalfuncarg(context, mapping, arg) |
|
295 | v = evalfuncarg(context, mapping, arg) | |
297 | try: |
|
296 | try: | |
298 | return int(v) |
|
297 | return int(v) | |
299 | except (TypeError, ValueError): |
|
298 | except (TypeError, ValueError): | |
300 | raise error.ParseError(err or _('not an integer')) |
|
299 | raise error.ParseError(err or _('not an integer')) | |
301 |
|
300 | |||
302 | def evalstring(context, mapping, arg): |
|
301 | def evalstring(context, mapping, arg): | |
303 | return stringify(evalrawexp(context, mapping, arg)) |
|
302 | return stringify(evalrawexp(context, mapping, arg)) | |
304 |
|
303 | |||
305 | def evalstringliteral(context, mapping, arg): |
|
304 | def evalstringliteral(context, mapping, arg): | |
306 | """Evaluate given argument as string template, but returns symbol name |
|
305 | """Evaluate given argument as string template, but returns symbol name | |
307 | if it is unknown""" |
|
306 | if it is unknown""" | |
308 | func, data = arg |
|
307 | func, data = arg | |
309 | if func is runsymbol: |
|
308 | if func is runsymbol: | |
310 | thing = func(context, mapping, data, default=data) |
|
309 | thing = func(context, mapping, data, default=data) | |
311 | else: |
|
310 | else: | |
312 | thing = func(context, mapping, data) |
|
311 | thing = func(context, mapping, data) | |
313 | return stringify(thing) |
|
312 | return stringify(thing) | |
314 |
|
313 | |||
315 | _evalfuncbytype = { |
|
314 | _evalfuncbytype = { | |
316 | bool: evalboolean, |
|
315 | bool: evalboolean, | |
317 | bytes: evalstring, |
|
316 | bytes: evalstring, | |
318 | int: evalinteger, |
|
317 | int: evalinteger, | |
319 | } |
|
318 | } | |
320 |
|
319 | |||
321 | def evalastype(context, mapping, arg, typ): |
|
320 | def evalastype(context, mapping, arg, typ): | |
322 | """Evaluate given argument and coerce its type""" |
|
321 | """Evaluate given argument and coerce its type""" | |
323 | try: |
|
322 | try: | |
324 | f = _evalfuncbytype[typ] |
|
323 | f = _evalfuncbytype[typ] | |
325 | except KeyError: |
|
324 | except KeyError: | |
326 | raise error.ProgrammingError('invalid type specified: %r' % typ) |
|
325 | raise error.ProgrammingError('invalid type specified: %r' % typ) | |
327 | return f(context, mapping, arg) |
|
326 | return f(context, mapping, arg) | |
328 |
|
327 | |||
329 | def runinteger(context, mapping, data): |
|
328 | def runinteger(context, mapping, data): | |
330 | return int(data) |
|
329 | return int(data) | |
331 |
|
330 | |||
332 | def runstring(context, mapping, data): |
|
331 | def runstring(context, mapping, data): | |
333 | return data |
|
332 | return data | |
334 |
|
333 | |||
335 | def _recursivesymbolblocker(key): |
|
334 | def _recursivesymbolblocker(key): | |
336 | def showrecursion(**args): |
|
335 | def showrecursion(**args): | |
337 | raise error.Abort(_("recursive reference '%s' in template") % key) |
|
336 | raise error.Abort(_("recursive reference '%s' in template") % key) | |
338 | return showrecursion |
|
337 | return showrecursion | |
339 |
|
338 | |||
340 | def runsymbol(context, mapping, key, default=''): |
|
339 | def runsymbol(context, mapping, key, default=''): | |
341 | v = context.symbol(mapping, key) |
|
340 | v = context.symbol(mapping, key) | |
342 | if v is None: |
|
341 | if v is None: | |
343 | # put poison to cut recursion. we can't move this to parsing phase |
|
342 | # put poison to cut recursion. we can't move this to parsing phase | |
344 | # because "x = {x}" is allowed if "x" is a keyword. (issue4758) |
|
343 | # because "x = {x}" is allowed if "x" is a keyword. (issue4758) | |
345 | safemapping = mapping.copy() |
|
344 | safemapping = mapping.copy() | |
346 | safemapping[key] = _recursivesymbolblocker(key) |
|
345 | safemapping[key] = _recursivesymbolblocker(key) | |
347 | try: |
|
346 | try: | |
348 | v = context.process(key, safemapping) |
|
347 | v = context.process(key, safemapping) | |
349 | except TemplateNotFound: |
|
348 | except TemplateNotFound: | |
350 | v = default |
|
349 | v = default | |
351 | if callable(v) and getattr(v, '_requires', None) is None: |
|
350 | if callable(v) and getattr(v, '_requires', None) is None: | |
352 | # old templatekw: expand all keywords and resources |
|
351 | # old templatekw: expand all keywords and resources | |
353 | props = {k: f(context, mapping, k) |
|
352 | props = {k: f(context, mapping, k) | |
354 | for k, f in context._resources.items()} |
|
353 | for k, f in context._resources.items()} | |
355 | props.update(mapping) |
|
354 | props.update(mapping) | |
356 | return v(**pycompat.strkwargs(props)) |
|
355 | return v(**pycompat.strkwargs(props)) | |
357 | if callable(v): |
|
356 | if callable(v): | |
358 | # new templatekw |
|
357 | # new templatekw | |
359 | try: |
|
358 | try: | |
360 | return v(context, mapping) |
|
359 | return v(context, mapping) | |
361 | except ResourceUnavailable: |
|
360 | except ResourceUnavailable: | |
362 | # unsupported keyword is mapped to empty just like unknown keyword |
|
361 | # unsupported keyword is mapped to empty just like unknown keyword | |
363 | return None |
|
362 | return None | |
364 | return v |
|
363 | return v | |
365 |
|
364 | |||
366 | def runtemplate(context, mapping, template): |
|
365 | def runtemplate(context, mapping, template): | |
367 | for arg in template: |
|
366 | for arg in template: | |
368 | yield evalrawexp(context, mapping, arg) |
|
367 | yield evalrawexp(context, mapping, arg) | |
369 |
|
368 | |||
370 | def runfilter(context, mapping, data): |
|
369 | def runfilter(context, mapping, data): | |
371 | arg, filt = data |
|
370 | arg, filt = data | |
372 | thing = evalfuncarg(context, mapping, arg) |
|
371 | thing = evalfuncarg(context, mapping, arg) | |
373 | try: |
|
372 | try: | |
374 | return filt(thing) |
|
373 | return filt(thing) | |
375 | except (ValueError, AttributeError, TypeError): |
|
374 | except (ValueError, AttributeError, TypeError): | |
376 | sym = findsymbolicname(arg) |
|
375 | sym = findsymbolicname(arg) | |
377 | if sym: |
|
376 | if sym: | |
378 | msg = (_("template filter '%s' is not compatible with keyword '%s'") |
|
377 | msg = (_("template filter '%s' is not compatible with keyword '%s'") | |
379 | % (pycompat.sysbytes(filt.__name__), sym)) |
|
378 | % (pycompat.sysbytes(filt.__name__), sym)) | |
380 | else: |
|
379 | else: | |
381 | msg = (_("incompatible use of template filter '%s'") |
|
380 | msg = (_("incompatible use of template filter '%s'") | |
382 | % pycompat.sysbytes(filt.__name__)) |
|
381 | % pycompat.sysbytes(filt.__name__)) | |
383 | raise error.Abort(msg) |
|
382 | raise error.Abort(msg) | |
384 |
|
383 | |||
385 | def runmap(context, mapping, data): |
|
384 | def runmap(context, mapping, data): | |
386 | darg, targ = data |
|
385 | darg, targ = data | |
387 | d = evalrawexp(context, mapping, darg) |
|
386 | d = evalrawexp(context, mapping, darg) | |
388 | if util.safehasattr(d, 'itermaps'): |
|
387 | if util.safehasattr(d, 'itermaps'): | |
389 | diter = d.itermaps() |
|
388 | diter = d.itermaps() | |
390 | else: |
|
389 | else: | |
391 | try: |
|
390 | try: | |
392 | diter = iter(d) |
|
391 | diter = iter(d) | |
393 | except TypeError: |
|
392 | except TypeError: | |
394 | sym = findsymbolicname(darg) |
|
393 | sym = findsymbolicname(darg) | |
395 | if sym: |
|
394 | if sym: | |
396 | raise error.ParseError(_("keyword '%s' is not iterable") % sym) |
|
395 | raise error.ParseError(_("keyword '%s' is not iterable") % sym) | |
397 | else: |
|
396 | else: | |
398 | raise error.ParseError(_("%r is not iterable") % d) |
|
397 | raise error.ParseError(_("%r is not iterable") % d) | |
399 |
|
398 | |||
400 | for i, v in enumerate(diter): |
|
399 | for i, v in enumerate(diter): | |
401 | lm = mapping.copy() |
|
400 | lm = mapping.copy() | |
402 | lm['index'] = i |
|
401 | lm['index'] = i | |
403 | if isinstance(v, dict): |
|
402 | if isinstance(v, dict): | |
404 | lm.update(v) |
|
403 | lm.update(v) | |
405 | lm['originalnode'] = mapping.get('node') |
|
404 | lm['originalnode'] = mapping.get('node') | |
406 | yield evalrawexp(context, lm, targ) |
|
405 | yield evalrawexp(context, lm, targ) | |
407 | else: |
|
406 | else: | |
408 | # v is not an iterable of dicts, this happens when 'key' |
|
407 | # v is not an iterable of dicts, this happens when 'key' | |
409 | # has been fully expanded already and format is useless. |
|
408 | # has been fully expanded already and format is useless. | |
410 | # If so, return the expanded value. |
|
409 | # If so, return the expanded value. | |
411 | yield v |
|
410 | yield v | |
412 |
|
411 | |||
413 | def runmember(context, mapping, data): |
|
412 | def runmember(context, mapping, data): | |
414 | darg, memb = data |
|
413 | darg, memb = data | |
415 | d = evalrawexp(context, mapping, darg) |
|
414 | d = evalrawexp(context, mapping, darg) | |
416 | if util.safehasattr(d, 'tomap'): |
|
415 | if util.safehasattr(d, 'tomap'): | |
417 | lm = mapping.copy() |
|
416 | lm = mapping.copy() | |
418 | lm.update(d.tomap()) |
|
417 | lm.update(d.tomap()) | |
419 | return runsymbol(context, lm, memb) |
|
418 | return runsymbol(context, lm, memb) | |
420 | if util.safehasattr(d, 'get'): |
|
419 | if util.safehasattr(d, 'get'): | |
421 | return getdictitem(d, memb) |
|
420 | return getdictitem(d, memb) | |
422 |
|
421 | |||
423 | sym = findsymbolicname(darg) |
|
422 | sym = findsymbolicname(darg) | |
424 | if sym: |
|
423 | if sym: | |
425 | raise error.ParseError(_("keyword '%s' has no member") % sym) |
|
424 | raise error.ParseError(_("keyword '%s' has no member") % sym) | |
426 | else: |
|
425 | else: | |
427 | raise error.ParseError(_("%r has no member") % pycompat.bytestr(d)) |
|
426 | raise error.ParseError(_("%r has no member") % pycompat.bytestr(d)) | |
428 |
|
427 | |||
429 | def runnegate(context, mapping, data): |
|
428 | def runnegate(context, mapping, data): | |
430 | data = evalinteger(context, mapping, data, |
|
429 | data = evalinteger(context, mapping, data, | |
431 | _('negation needs an integer argument')) |
|
430 | _('negation needs an integer argument')) | |
432 | return -data |
|
431 | return -data | |
433 |
|
432 | |||
434 | def runarithmetic(context, mapping, data): |
|
433 | def runarithmetic(context, mapping, data): | |
435 | func, left, right = data |
|
434 | func, left, right = data | |
436 | left = evalinteger(context, mapping, left, |
|
435 | left = evalinteger(context, mapping, left, | |
437 | _('arithmetic only defined on integers')) |
|
436 | _('arithmetic only defined on integers')) | |
438 | right = evalinteger(context, mapping, right, |
|
437 | right = evalinteger(context, mapping, right, | |
439 | _('arithmetic only defined on integers')) |
|
438 | _('arithmetic only defined on integers')) | |
440 | try: |
|
439 | try: | |
441 | return func(left, right) |
|
440 | return func(left, right) | |
442 | except ZeroDivisionError: |
|
441 | except ZeroDivisionError: | |
443 | raise error.Abort(_('division by zero is not defined')) |
|
442 | raise error.Abort(_('division by zero is not defined')) | |
444 |
|
443 | |||
445 | def getdictitem(dictarg, key): |
|
444 | def getdictitem(dictarg, key): | |
446 | val = dictarg.get(key) |
|
445 | val = dictarg.get(key) | |
447 | if val is None: |
|
446 | if val is None: | |
448 | return |
|
447 | return | |
449 | return wraphybridvalue(dictarg, key, val) |
|
448 | return wraphybridvalue(dictarg, key, val) |
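As context for the _showlist() hunk in templateutil.py above: the docstring's expansion rules (no_foos, start_foos, foo, last_foo, end_foos) can be exercised directly once the templater holds fragments under those names. The sketch below is illustrative only; the 'foo' fragments are invented for the example and _showlist is an internal helper, not a public API.

    from mercurial import templater, templateutil

    # Hypothetical fragments following the start_/last_/end_ naming convention.
    t = templater.templater(cache={
        b'start_foos': b'[',
        b'foo': b'{foo} ',
        b'last_foo': b'{foo}',
        b'end_foos': b']',
        b'no_foos': b'(no foos)',
    })

    # With values present: start_foos, then foo per item (last_foo for the
    # final one), then end_foos.
    gen = templateutil._showlist(b'foo', [b'a', b'b', b'c'], t, {})
    assert templateutil.stringify(gen) == b'[a b c]'

    # With no values: only no_foos is expanded.
    gen = templateutil._showlist(b'foo', [], t, {})
    assert templateutil.stringify(gen) == b'(no foos)'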