@@ -1,811 +1,814 b'' | |||
|
1 | 1 | # keyword.py - $Keyword$ expansion for Mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | # |
|
8 | 8 | # $Id$ |
|
9 | 9 | # |
|
10 | 10 | # Keyword expansion hack against the grain of a Distributed SCM |
|
11 | 11 | # |
|
12 | 12 | # There are many good reasons why this is not needed in a distributed |
|
13 | 13 | # SCM, still it may be useful in very small projects based on single |
|
14 | 14 | # files (like LaTeX packages), that are mostly addressed to an |
|
15 | 15 | # audience not running a version control system. |
|
16 | 16 | # |
|
17 | 17 | # For in-depth discussion refer to |
|
18 | 18 | # <https://mercurial-scm.org/wiki/KeywordPlan>. |
|
19 | 19 | # |
|
20 | 20 | # Keyword expansion is based on Mercurial's changeset template mappings. |
|
21 | 21 | # |
|
22 | 22 | # Binary files are not touched. |
|
23 | 23 | # |
|
24 | 24 | # Files to act upon/ignore are specified in the [keyword] section. |
|
25 | 25 | # Customized keyword template mappings in the [keywordmaps] section. |
|
26 | 26 | # |
|
27 | 27 | # Run 'hg help keyword' and 'hg kwdemo' to get info on configuration. |
|
28 | 28 | |
|
29 | 29 | '''expand keywords in tracked files |
|
30 | 30 | |
|
31 | 31 | This extension expands RCS/CVS-like or self-customized $Keywords$ in |
|
32 | 32 | tracked text files selected by your configuration. |
|
33 | 33 | |
|
34 | 34 | Keywords are only expanded in local repositories and not stored in the |
|
35 | 35 | change history. The mechanism can be regarded as a convenience for the |
|
36 | 36 | current user or for archive distribution. |
|
37 | 37 | |
|
38 | 38 | Keywords expand to the changeset data pertaining to the latest change |
|
39 | 39 | relative to the working directory parent of each file. |
|
40 | 40 | |
|
41 | 41 | Configuration is done in the [keyword], [keywordset] and [keywordmaps] |
|
42 | 42 | sections of hgrc files. |
|
43 | 43 | |
|
44 | 44 | Example:: |
|
45 | 45 | |
|
46 | 46 | [keyword] |
|
47 | 47 | # expand keywords in every python file except those matching "x*" |
|
48 | 48 | **.py = |
|
49 | 49 | x* = ignore |
|
50 | 50 | |
|
51 | 51 | [keywordset] |
|
52 | 52 | # prefer svn- over cvs-like default keywordmaps |
|
53 | 53 | svn = True |
|
54 | 54 | |
|
55 | 55 | .. note:: |
|
56 | 56 | |
|
57 | 57 | The more specific you are in your filename patterns the less you |
|
58 | 58 | lose speed in huge repositories. |
|
59 | 59 | |
|
60 | 60 | For [keywordmaps] template mapping and expansion demonstration and |
|
61 | 61 | control run :hg:`kwdemo`. See :hg:`help templates` for a list of |
|
62 | 62 | available templates and filters. |
|
63 | 63 | |
|
64 | 64 | Three additional date template filters are provided: |
|
65 | 65 | |
|
66 | 66 | :``utcdate``: "2006/09/18 15:13:13" |
|
67 | 67 | :``svnutcdate``: "2006-09-18 15:13:13Z" |
|
68 | 68 | :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)" |
|
69 | 69 | |
|
70 | 70 | The default template mappings (view with :hg:`kwdemo -d`) can be |
|
71 | 71 | replaced with customized keywords and templates. Again, run |
|
72 | 72 | :hg:`kwdemo` to control the results of your configuration changes. |
|
73 | 73 | |
|
74 | 74 | Before changing/disabling active keywords, you must run :hg:`kwshrink` |
|
75 | 75 | to avoid storing expanded keywords in the change history. |
|
76 | 76 | |
|
77 | 77 | To force expansion after enabling it, or a configuration change, run |
|
78 | 78 | :hg:`kwexpand`. |
|
79 | 79 | |
|
80 | 80 | Expansions spanning more than one line and incremental expansions, |
|
81 | 81 | like CVS' $Log$, are not supported. A keyword template map "Log = |
|
82 | 82 | {desc}" expands to the first line of the changeset description. |
|
83 | 83 | ''' |
|
84 | 84 | |
|
85 | 85 | |
|
86 | 86 | from __future__ import absolute_import |
|
87 | 87 | |
|
88 | 88 | import os |
|
89 | 89 | import re |
|
90 | 90 | import tempfile |
|
91 | 91 | import weakref |
|
92 | 92 | |
|
93 | 93 | from mercurial.i18n import _ |
|
94 | 94 | from mercurial.hgweb import webcommands |
|
95 | 95 | |
|
96 | 96 | from mercurial import ( |
|
97 | 97 | cmdutil, |
|
98 | 98 | context, |
|
99 | 99 | dispatch, |
|
100 | 100 | error, |
|
101 | 101 | extensions, |
|
102 | 102 | filelog, |
|
103 | 103 | localrepo, |
|
104 | 104 | logcmdutil, |
|
105 | 105 | match, |
|
106 | 106 | patch, |
|
107 | 107 | pathutil, |
|
108 | 108 | pycompat, |
|
109 | 109 | registrar, |
|
110 | 110 | scmutil, |
|
111 | 111 | templatefilters, |
|
112 | 112 | util, |
|
113 | 113 | ) |
|
114 | 114 | from mercurial.utils import dateutil |
|
115 | 115 | |
|
116 | 116 | cmdtable = {} |
|
117 | 117 | command = registrar.command(cmdtable) |
|
118 | 118 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for |
|
119 | 119 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should |
|
120 | 120 | # be specifying the version(s) of Mercurial they are tested with, or |
|
121 | 121 | # leave the attribute unspecified. |
|
122 | 122 | testedwith = 'ships-with-hg-core' |
|
123 | 123 | |
|
124 | 124 | # hg commands that do not act on keywords |
|
125 | 125 | nokwcommands = ('add addremove annotate bundle export grep incoming init log' |
|
126 | 126 | ' outgoing push tip verify convert email glog') |
|
127 | 127 | |
|
128 | 128 | # webcommands that do not act on keywords |
|
129 | 129 | nokwwebcommands = ('annotate changeset rev filediff diff comparison') |
|
130 | 130 | |
|
131 | 131 | # hg commands that trigger expansion only when writing to working dir, |
|
132 | 132 | # not when reading filelog, and unexpand when reading from working dir |
|
133 | 133 | restricted = ('merge kwexpand kwshrink record qrecord resolve transplant' |
|
134 | 134 | ' unshelve rebase graft backout histedit fetch') |
|
135 | 135 | |
|
136 | 136 | # names of extensions using dorecord |
|
137 | 137 | recordextensions = 'record' |
|
138 | 138 | |
|
139 | 139 | colortable = { |
|
140 | 140 | 'kwfiles.enabled': 'green bold', |
|
141 | 141 | 'kwfiles.deleted': 'cyan bold underline', |
|
142 | 142 | 'kwfiles.enabledunknown': 'green', |
|
143 | 143 | 'kwfiles.ignored': 'bold', |
|
144 | 144 | 'kwfiles.ignoredunknown': 'none' |
|
145 | 145 | } |
|
146 | 146 | |
|
147 | 147 | templatefilter = registrar.templatefilter() |
|
148 | 148 | |
|
149 | 149 | configtable = {} |
|
150 | 150 | configitem = registrar.configitem(configtable) |
|
151 | 151 | |
|
152 | 152 | configitem('keywordset', 'svn', |
|
153 | 153 | default=False, |
|
154 | 154 | ) |
|
155 | 155 | # date like in cvs' $Date |
|
156 | 156 | @templatefilter('utcdate') |
|
157 | 157 | def utcdate(text): |
|
158 | 158 | '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13". |
|
159 | 159 | ''' |
|
160 | 160 | dateformat = '%Y/%m/%d %H:%M:%S' |
|
161 | 161 | return dateutil.datestr((dateutil.parsedate(text)[0], 0), dateformat) |
|
162 | 162 | # date like in svn's $Date |
|
163 | 163 | @templatefilter('svnisodate') |
|
164 | 164 | def svnisodate(text): |
|
165 | 165 | '''Date. Returns a date in this format: "2009-08-18 13:00:13 |
|
166 | 166 | +0200 (Tue, 18 Aug 2009)". |
|
167 | 167 | ''' |
|
168 | 168 | return dateutil.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)') |
|
169 | 169 | # date like in svn's $Id |
|
170 | 170 | @templatefilter('svnutcdate') |
|
171 | 171 | def svnutcdate(text): |
|
172 | 172 | '''Date. Returns a UTC-date in this format: "2009-08-18 |
|
173 | 173 | 11:00:13Z". |
|
174 | 174 | ''' |
|
175 | 175 | dateformat = '%Y-%m-%d %H:%M:%SZ' |
|
176 | 176 | return dateutil.datestr((dateutil.parsedate(text)[0], 0), dateformat) |
|
177 | 177 | |
|
178 | 178 | # make keyword tools accessible |
|
179 | 179 | kwtools = {'hgcmd': ''} |
|
180 | 180 | |
|
181 | 181 | def _defaultkwmaps(ui): |
|
182 | 182 | '''Returns default keywordmaps according to keywordset configuration.''' |
|
183 | 183 | templates = { |
|
184 | 184 | 'Revision': '{node|short}', |
|
185 | 185 | 'Author': '{author|user}', |
|
186 | 186 | } |
|
187 | 187 | kwsets = ({ |
|
188 | 188 | 'Date': '{date|utcdate}', |
|
189 | 189 | 'RCSfile': '{file|basename},v', |
|
190 | 190 | 'RCSFile': '{file|basename},v', # kept for backwards compatibility |
|
191 | 191 | # with hg-keyword |
|
192 | 192 | 'Source': '{root}/{file},v', |
|
193 | 193 | 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}', |
|
194 | 194 | 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}', |
|
195 | 195 | }, { |
|
196 | 196 | 'Date': '{date|svnisodate}', |
|
197 | 197 | 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}', |
|
198 | 198 | 'LastChangedRevision': '{node|short}', |
|
199 | 199 | 'LastChangedBy': '{author|user}', |
|
200 | 200 | 'LastChangedDate': '{date|svnisodate}', |
|
201 | 201 | }) |
|
202 | 202 | templates.update(kwsets[ui.configbool('keywordset', 'svn')]) |
|
203 | 203 | return templates |
|
204 | 204 | |
|
205 | 205 | def _shrinktext(text, subfunc): |
|
206 | 206 | '''Helper for keyword expansion removal in text. |
|
207 | 207 | Depending on subfunc also returns number of substitutions.''' |
|
208 | 208 | return subfunc(r'$\1$', text) |
|
209 | 209 | |
|
210 | 210 | def _preselect(wstatus, changed): |
|
211 | 211 | '''Retrieves modified and added files from a working directory state |
|
212 | 212 | and returns the subset of each contained in given changed files |
|
213 | 213 | retrieved from a change context.''' |
|
214 | 214 | modified = [f for f in wstatus.modified if f in changed] |
|
215 | 215 | added = [f for f in wstatus.added if f in changed] |
|
216 | 216 | return modified, added |
|
217 | 217 | |
|
218 | 218 | |
|
219 | 219 | class kwtemplater(object): |
|
220 | 220 | ''' |
|
221 | 221 | Sets up keyword templates, corresponding keyword regex, and |
|
222 | 222 | provides keyword substitution functions. |
|
223 | 223 | ''' |
|
224 | 224 | |
|
225 | 225 | def __init__(self, ui, repo, inc, exc): |
|
226 | 226 | self.ui = ui |
|
227 | 227 | self._repo = weakref.ref(repo) |
|
228 | 228 | self.match = match.match(repo.root, '', [], inc, exc) |
|
229 | 229 | self.restrict = kwtools['hgcmd'] in restricted.split() |
|
230 | 230 | self.postcommit = False |
|
231 | 231 | |
|
232 | 232 | kwmaps = self.ui.configitems('keywordmaps') |
|
233 | 233 | if kwmaps: # override default templates |
|
234 | 234 | self.templates = dict(kwmaps) |
|
235 | 235 | else: |
|
236 | 236 | self.templates = _defaultkwmaps(self.ui) |
|
237 | 237 | |
|
238 | 238 | @property |
|
239 | 239 | def repo(self): |
|
240 | 240 | return self._repo() |
|
241 | 241 | |
|
242 | 242 | @util.propertycache |
|
243 | 243 | def escape(self): |
|
244 | 244 | '''Returns bar-separated and escaped keywords.''' |
|
245 | 245 | return '|'.join(map(re.escape, self.templates.keys())) |
|
246 | 246 | |
|
247 | 247 | @util.propertycache |
|
248 | 248 | def rekw(self): |
|
249 | 249 | '''Returns regex for unexpanded keywords.''' |
|
250 | 250 | return re.compile(r'\$(%s)\$' % self.escape) |
|
251 | 251 | |
|
252 | 252 | @util.propertycache |
|
253 | 253 | def rekwexp(self): |
|
254 | 254 | '''Returns regex for expanded keywords.''' |
|
255 | 255 | return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape) |
|
256 | 256 | |
|
257 | 257 | def substitute(self, data, path, ctx, subfunc): |
|
258 | 258 | '''Replaces keywords in data with expanded template.''' |
|
259 | 259 | def kwsub(mobj): |
|
260 | 260 | kw = mobj.group(1) |
|
261 | 261 | ct = logcmdutil.maketemplater(self.ui, self.repo, |
|
262 | 262 | self.templates[kw]) |
|
263 | 263 | self.ui.pushbuffer() |
|
264 | 264 | ct.show(ctx, root=self.repo.root, file=path) |
|
265 | 265 | ekw = templatefilters.firstline(self.ui.popbuffer()) |
|
266 | 266 | return '$%s: %s $' % (kw, ekw) |
|
267 | 267 | return subfunc(kwsub, data) |
|
268 | 268 | |
|
269 | 269 | def linkctx(self, path, fileid): |
|
270 | 270 | '''Similar to filelog.linkrev, but returns a changectx.''' |
|
271 | 271 | return self.repo.filectx(path, fileid=fileid).changectx() |
|
272 | 272 | |
|
273 | 273 | def expand(self, path, node, data): |
|
274 | 274 | '''Returns data with keywords expanded.''' |
|
275 | 275 | if not self.restrict and self.match(path) and not util.binary(data): |
|
276 | 276 | ctx = self.linkctx(path, node) |
|
277 | 277 | return self.substitute(data, path, ctx, self.rekw.sub) |
|
278 | 278 | return data |
|
279 | 279 | |
|
280 | 280 | def iskwfile(self, cand, ctx): |
|
281 | 281 | '''Returns subset of candidates which are configured for keyword |
|
282 | 282 | expansion but are not symbolic links.''' |
|
283 | 283 | return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)] |
|
284 | 284 | |
|
285 | 285 | def overwrite(self, ctx, candidates, lookup, expand, rekw=False): |
|
286 | 286 | '''Overwrites selected files expanding/shrinking keywords.''' |
|
287 | 287 | if self.restrict or lookup or self.postcommit: # exclude kw_copy |
|
288 | 288 | candidates = self.iskwfile(candidates, ctx) |
|
289 | 289 | if not candidates: |
|
290 | 290 | return |
|
291 | 291 | kwcmd = self.restrict and lookup # kwexpand/kwshrink |
|
292 | 292 | if self.restrict or expand and lookup: |
|
293 | 293 | mf = ctx.manifest() |
|
294 | 294 | if self.restrict or rekw: |
|
295 | 295 | re_kw = self.rekw |
|
296 | 296 | else: |
|
297 | 297 | re_kw = self.rekwexp |
|
298 | 298 | if expand: |
|
299 | 299 | msg = _('overwriting %s expanding keywords\n') |
|
300 | 300 | else: |
|
301 | 301 | msg = _('overwriting %s shrinking keywords\n') |
|
302 | 302 | for f in candidates: |
|
303 | 303 | if self.restrict: |
|
304 | 304 | data = self.repo.file(f).read(mf[f]) |
|
305 | 305 | else: |
|
306 | 306 | data = self.repo.wread(f) |
|
307 | 307 | if util.binary(data): |
|
308 | 308 | continue |
|
309 | 309 | if expand: |
|
310 | 310 | parents = ctx.parents() |
|
311 | 311 | if lookup: |
|
312 | 312 | ctx = self.linkctx(f, mf[f]) |
|
313 | 313 | elif self.restrict and len(parents) > 1: |
|
314 | 314 | # merge commit |
|
315 | 315 | # in case of conflict f is in modified state during |
|
316 | 316 | # merge, even if f does not differ from f in parent |
|
317 | 317 | for p in parents: |
|
318 | 318 | if f in p and not p[f].cmp(ctx[f]): |
|
319 | 319 | ctx = p[f].changectx() |
|
320 | 320 | break |
|
321 | 321 | data, found = self.substitute(data, f, ctx, re_kw.subn) |
|
322 | 322 | elif self.restrict: |
|
323 | 323 | found = re_kw.search(data) |
|
324 | 324 | else: |
|
325 | 325 | data, found = _shrinktext(data, re_kw.subn) |
|
326 | 326 | if found: |
|
327 | 327 | self.ui.note(msg % f) |
|
328 | 328 | fp = self.repo.wvfs(f, "wb", atomictemp=True) |
|
329 | 329 | fp.write(data) |
|
330 | 330 | fp.close() |
|
331 | 331 | if kwcmd: |
|
332 | 332 | self.repo.dirstate.normal(f) |
|
333 | 333 | elif self.postcommit: |
|
334 | 334 | self.repo.dirstate.normallookup(f) |
|
335 | 335 | |
|
336 | 336 | def shrink(self, fname, text): |
|
337 | 337 | '''Returns text with all keyword substitutions removed.''' |
|
338 | 338 | if self.match(fname) and not util.binary(text): |
|
339 | 339 | return _shrinktext(text, self.rekwexp.sub) |
|
340 | 340 | return text |
|
341 | 341 | |
|
342 | 342 | def shrinklines(self, fname, lines): |
|
343 | 343 | '''Returns lines with keyword substitutions removed.''' |
|
344 | 344 | if self.match(fname): |
|
345 | 345 | text = ''.join(lines) |
|
346 | 346 | if not util.binary(text): |
|
347 | 347 | return _shrinktext(text, self.rekwexp.sub).splitlines(True) |
|
348 | 348 | return lines |
|
349 | 349 | |
|
350 | 350 | def wread(self, fname, data): |
|
351 | 351 | '''If in restricted mode returns data read from wdir with |
|
352 | 352 | keyword substitutions removed.''' |
|
353 | 353 | if self.restrict: |
|
354 | 354 | return self.shrink(fname, data) |
|
355 | 355 | return data |
|
356 | 356 | |
|
357 | 357 | class kwfilelog(filelog.filelog): |
|
358 | 358 | ''' |
|
359 | 359 | Subclass of filelog to hook into its read, add, cmp methods. |
|
360 | 360 | Keywords are "stored" unexpanded, and processed on reading. |
|
361 | 361 | ''' |
|
362 | 362 | def __init__(self, opener, kwt, path): |
|
363 | 363 | super(kwfilelog, self).__init__(opener, path) |
|
364 | 364 | self.kwt = kwt |
|
365 | 365 | self.path = path |
|
366 | 366 | |
|
367 | 367 | def read(self, node): |
|
368 | 368 | '''Expands keywords when reading filelog.''' |
|
369 | 369 | data = super(kwfilelog, self).read(node) |
|
370 | 370 | if self.renamed(node): |
|
371 | 371 | return data |
|
372 | 372 | return self.kwt.expand(self.path, node, data) |
|
373 | 373 | |
|
374 | 374 | def add(self, text, meta, tr, link, p1=None, p2=None): |
|
375 | 375 | '''Removes keyword substitutions when adding to filelog.''' |
|
376 | 376 | text = self.kwt.shrink(self.path, text) |
|
377 | 377 | return super(kwfilelog, self).add(text, meta, tr, link, p1, p2) |
|
378 | 378 | |
|
379 | 379 | def cmp(self, node, text): |
|
380 | 380 | '''Removes keyword substitutions for comparison.''' |
|
381 | 381 | text = self.kwt.shrink(self.path, text) |
|
382 | 382 | return super(kwfilelog, self).cmp(node, text) |
|
383 | 383 | |
|
384 | 384 | def _status(ui, repo, wctx, kwt, *pats, **opts): |
|
385 | 385 | '''Bails out if [keyword] configuration is not active. |
|
386 | 386 | Returns status of working directory.''' |
|
387 | 387 | if kwt: |
|
388 | 388 | opts = pycompat.byteskwargs(opts) |
|
389 | 389 | return repo.status(match=scmutil.match(wctx, pats, opts), clean=True, |
|
390 | 390 | unknown=opts.get('unknown') or opts.get('all')) |
|
391 | 391 | if ui.configitems('keyword'): |
|
392 | 392 | raise error.Abort(_('[keyword] patterns cannot match')) |
|
393 | 393 | raise error.Abort(_('no [keyword] patterns configured')) |
|
394 | 394 | |
|
395 | 395 | def _kwfwrite(ui, repo, expand, *pats, **opts): |
|
396 | 396 | '''Selects files and passes them to kwtemplater.overwrite.''' |
|
397 | 397 | wctx = repo[None] |
|
398 | 398 | if len(wctx.parents()) > 1: |
|
399 | 399 | raise error.Abort(_('outstanding uncommitted merge')) |
|
400 | 400 | kwt = getattr(repo, '_keywordkwt', None) |
|
401 | 401 | with repo.wlock(): |
|
402 | 402 | status = _status(ui, repo, wctx, kwt, *pats, **opts) |
|
403 | 403 | if status.modified or status.added or status.removed or status.deleted: |
|
404 | 404 | raise error.Abort(_('outstanding uncommitted changes')) |
|
405 | 405 | kwt.overwrite(wctx, status.clean, True, expand) |
|
406 | 406 | |
|
407 | 407 | @command('kwdemo', |
|
408 | 408 | [('d', 'default', None, _('show default keyword template maps')), |
|
409 | 409 | ('f', 'rcfile', '', |
|
410 | 410 | _('read maps from rcfile'), _('FILE'))], |
|
411 | 411 | _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'), |
|
412 | 412 | optionalrepo=True) |
|
413 | 413 | def demo(ui, repo, *args, **opts): |
|
414 | 414 | '''print [keywordmaps] configuration and an expansion example |
|
415 | 415 | |
|
416 | 416 | Show current, custom, or default keyword template maps and their |
|
417 | 417 | expansions. |
|
418 | 418 | |
|
419 | 419 | Extend the current configuration by specifying maps as arguments |
|
420 | 420 | and using -f/--rcfile to source an external hgrc file. |
|
421 | 421 | |
|
422 | 422 | Use -d/--default to disable current configuration. |
|
423 | 423 | |
|
424 | 424 | See :hg:`help templates` for information on templates and filters. |
|
425 | 425 | ''' |
|
426 | 426 | def demoitems(section, items): |
|
427 | 427 | ui.write('[%s]\n' % section) |
|
428 | 428 | for k, v in sorted(items): |
|
429 | 429 | ui.write('%s = %s\n' % (k, v)) |
|
430 | 430 | |
|
431 | 431 | fn = 'demo.txt' |
|
432 | 432 | tmpdir = tempfile.mkdtemp('', 'kwdemo.') |
|
433 | 433 | ui.note(_('creating temporary repository at %s\n') % tmpdir) |
|
434 | 434 | if repo is None: |
|
435 | 435 | baseui = ui |
|
436 | 436 | else: |
|
437 | 437 | baseui = repo.baseui |
|
438 | 438 | repo = localrepo.localrepository(baseui, tmpdir, True) |
|
439 | 439 | ui.setconfig('keyword', fn, '', 'keyword') |
|
440 | 440 | svn = ui.configbool('keywordset', 'svn') |
|
441 | 441 | # explicitly set keywordset for demo output |
|
442 | 442 | ui.setconfig('keywordset', 'svn', svn, 'keyword') |
|
443 | 443 | |
|
444 | 444 | uikwmaps = ui.configitems('keywordmaps') |
|
445 | 445 | if args or opts.get(r'rcfile'): |
|
446 | 446 | ui.status(_('\n\tconfiguration using custom keyword template maps\n')) |
|
447 | 447 | if uikwmaps: |
|
448 | 448 | ui.status(_('\textending current template maps\n')) |
|
449 | 449 | if opts.get(r'default') or not uikwmaps: |
|
450 | 450 | if svn: |
|
451 | 451 | ui.status(_('\toverriding default svn keywordset\n')) |
|
452 | 452 | else: |
|
453 | 453 | ui.status(_('\toverriding default cvs keywordset\n')) |
|
454 | 454 | if opts.get(r'rcfile'): |
|
455 | 455 | ui.readconfig(opts.get('rcfile')) |
|
456 | 456 | if args: |
|
457 | 457 | # simulate hgrc parsing |
|
458 | 458 | rcmaps = '[keywordmaps]\n%s\n' % '\n'.join(args) |
|
459 | 459 | repo.vfs.write('hgrc', rcmaps) |
|
460 | 460 | ui.readconfig(repo.vfs.join('hgrc')) |
|
461 | 461 | kwmaps = dict(ui.configitems('keywordmaps')) |
|
462 | 462 | elif opts.get(r'default'): |
|
463 | 463 | if svn: |
|
464 | 464 | ui.status(_('\n\tconfiguration using default svn keywordset\n')) |
|
465 | 465 | else: |
|
466 | 466 | ui.status(_('\n\tconfiguration using default cvs keywordset\n')) |
|
467 | 467 | kwmaps = _defaultkwmaps(ui) |
|
468 | 468 | if uikwmaps: |
|
469 | 469 | ui.status(_('\tdisabling current template maps\n')) |
|
470 | 470 | for k, v in kwmaps.iteritems(): |
|
471 | 471 | ui.setconfig('keywordmaps', k, v, 'keyword') |
|
472 | 472 | else: |
|
473 | 473 | ui.status(_('\n\tconfiguration using current keyword template maps\n')) |
|
474 | 474 | if uikwmaps: |
|
475 | 475 | kwmaps = dict(uikwmaps) |
|
476 | 476 | else: |
|
477 | 477 | kwmaps = _defaultkwmaps(ui) |
|
478 | 478 | |
|
479 | 479 | uisetup(ui) |
|
480 | 480 | reposetup(ui, repo) |
|
481 | 481 | ui.write(('[extensions]\nkeyword =\n')) |
|
482 | 482 | demoitems('keyword', ui.configitems('keyword')) |
|
483 | 483 | demoitems('keywordset', ui.configitems('keywordset')) |
|
484 | 484 | demoitems('keywordmaps', kwmaps.iteritems()) |
|
485 | 485 | keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n' |
|
486 | 486 | repo.wvfs.write(fn, keywords) |
|
487 | 487 | repo[None].add([fn]) |
|
488 | 488 | ui.note(_('\nkeywords written to %s:\n') % fn) |
|
489 | 489 | ui.note(keywords) |
|
490 | 490 | with repo.wlock(): |
|
491 | 491 | repo.dirstate.setbranch('demobranch') |
|
492 | 492 | for name, cmd in ui.configitems('hooks'): |
|
493 | 493 | if name.split('.', 1)[0].find('commit') > -1: |
|
494 | 494 | repo.ui.setconfig('hooks', name, '', 'keyword') |
|
495 | 495 | msg = _('hg keyword configuration and expansion example') |
|
496 | 496 | ui.note(("hg ci -m '%s'\n" % msg)) |
|
497 | 497 | repo.commit(text=msg) |
|
498 | 498 | ui.status(_('\n\tkeywords expanded\n')) |
|
499 | 499 | ui.write(repo.wread(fn)) |
|
500 | 500 | repo.wvfs.rmtree(repo.root) |
|
501 | 501 | |
|
502 | 502 | @command('kwexpand', |
|
503 | 503 | cmdutil.walkopts, |
|
504 | 504 | _('hg kwexpand [OPTION]... [FILE]...'), |
|
505 | 505 | inferrepo=True) |
|
506 | 506 | def expand(ui, repo, *pats, **opts): |
|
507 | 507 | '''expand keywords in the working directory |
|
508 | 508 | |
|
509 | 509 | Run after (re)enabling keyword expansion. |
|
510 | 510 | |
|
511 | 511 | kwexpand refuses to run if given files contain local changes. |
|
512 | 512 | ''' |
|
513 | 513 | # 3rd argument sets expansion to True |
|
514 | 514 | _kwfwrite(ui, repo, True, *pats, **opts) |
|
515 | 515 | |
|
516 | 516 | @command('kwfiles', |
|
517 | 517 | [('A', 'all', None, _('show keyword status flags of all files')), |
|
518 | 518 | ('i', 'ignore', None, _('show files excluded from expansion')), |
|
519 | 519 | ('u', 'unknown', None, _('only show unknown (not tracked) files')), |
|
520 | 520 | ] + cmdutil.walkopts, |
|
521 | 521 | _('hg kwfiles [OPTION]... [FILE]...'), |
|
522 | 522 | inferrepo=True) |
|
523 | 523 | def files(ui, repo, *pats, **opts): |
|
524 | 524 | '''show files configured for keyword expansion |
|
525 | 525 | |
|
526 | 526 | List which files in the working directory are matched by the |
|
527 | 527 | [keyword] configuration patterns. |
|
528 | 528 | |
|
529 | 529 | Useful to prevent inadvertent keyword expansion and to speed up |
|
530 | 530 | execution by including only files that are actual candidates for |
|
531 | 531 | expansion. |
|
532 | 532 | |
|
533 | 533 | See :hg:`help keyword` on how to construct patterns both for |
|
534 | 534 | inclusion and exclusion of files. |
|
535 | 535 | |
|
536 | 536 | With -A/--all and -v/--verbose the codes used to show the status |
|
537 | 537 | of files are:: |
|
538 | 538 | |
|
539 | 539 | K = keyword expansion candidate |
|
540 | 540 | k = keyword expansion candidate (not tracked) |
|
541 | 541 | I = ignored |
|
542 | 542 | i = ignored (not tracked) |
|
543 | 543 | ''' |
|
544 | 544 | kwt = getattr(repo, '_keywordkwt', None) |
|
545 | 545 | wctx = repo[None] |
|
546 | 546 | status = _status(ui, repo, wctx, kwt, *pats, **opts) |
|
547 | 547 | if pats: |
|
548 | 548 | cwd = repo.getcwd() |
|
549 | 549 | else: |
|
550 | 550 | cwd = '' |
|
551 | 551 | files = [] |
|
552 | 552 | opts = pycompat.byteskwargs(opts) |
|
553 | 553 | if not opts.get('unknown') or opts.get('all'): |
|
554 | 554 | files = sorted(status.modified + status.added + status.clean) |
|
555 | 555 | kwfiles = kwt.iskwfile(files, wctx) |
|
556 | 556 | kwdeleted = kwt.iskwfile(status.deleted, wctx) |
|
557 | 557 | kwunknown = kwt.iskwfile(status.unknown, wctx) |
|
558 | 558 | if not opts.get('ignore') or opts.get('all'): |
|
559 | 559 | showfiles = kwfiles, kwdeleted, kwunknown |
|
560 | 560 | else: |
|
561 | 561 | showfiles = [], [], [] |
|
562 | 562 | if opts.get('all') or opts.get('ignore'): |
|
563 | 563 | showfiles += ([f for f in files if f not in kwfiles], |
|
564 | 564 | [f for f in status.unknown if f not in kwunknown]) |
|
565 | 565 | kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split() |
|
566 | 566 | kwstates = zip(kwlabels, 'K!kIi', showfiles) |
|
567 | 567 | fm = ui.formatter('kwfiles', opts) |
|
568 | 568 | fmt = '%.0s%s\n' |
|
569 | 569 | if opts.get('all') or ui.verbose: |
|
570 | 570 | fmt = '%s %s\n' |
|
571 | 571 | for kwstate, char, filenames in kwstates: |
|
572 | 572 | label = 'kwfiles.' + kwstate |
|
573 | 573 | for f in filenames: |
|
574 | 574 | fm.startitem() |
|
575 | 575 | fm.write('kwstatus path', fmt, char, |
|
576 | 576 | repo.pathto(f, cwd), label=label) |
|
577 | 577 | fm.end() |
|
578 | 578 | |
|
579 | 579 | @command('kwshrink', |
|
580 | 580 | cmdutil.walkopts, |
|
581 | 581 | _('hg kwshrink [OPTION]... [FILE]...'), |
|
582 | 582 | inferrepo=True) |
|
583 | 583 | def shrink(ui, repo, *pats, **opts): |
|
584 | 584 | '''revert expanded keywords in the working directory |
|
585 | 585 | |
|
586 | 586 | Must be run before changing/disabling active keywords. |
|
587 | 587 | |
|
588 | 588 | kwshrink refuses to run if given files contain local changes. |
|
589 | 589 | ''' |
|
590 | 590 | # 3rd argument sets expansion to False |
|
591 | 591 | _kwfwrite(ui, repo, False, *pats, **opts) |
|
592 | 592 | |
|
593 | 593 | # monkeypatches |
|
594 | 594 | |
|
595 | 595 | def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None): |
|
596 | 596 | '''Monkeypatch/wrap patch.patchfile.__init__ to avoid |
|
597 | 597 | rejects or conflicts due to expanded keywords in working dir.''' |
|
598 | 598 | orig(self, ui, gp, backend, store, eolmode) |
|
599 | 599 | kwt = getattr(getattr(backend, 'repo', None), '_keywordkwt', None) |
|
600 | 600 | if kwt: |
|
601 | 601 | # shrink keywords read from working dir |
|
602 | 602 | self.lines = kwt.shrinklines(self.fname, self.lines) |
|
603 | 603 | |
|
604 | 604 | def kwdiff(orig, repo, *args, **kwargs): |
|
605 | 605 | '''Monkeypatch patch.diff to avoid expansion.''' |
|
606 | 606 | kwt = getattr(repo, '_keywordkwt', None) |
|
607 | 607 | if kwt: |
|
608 | 608 | restrict = kwt.restrict |
|
609 | 609 | kwt.restrict = True |
|
610 | 610 | try: |
|
611 | 611 | for chunk in orig(repo, *args, **kwargs): |
|
612 | 612 | yield chunk |
|
613 | 613 | finally: |
|
614 | 614 | if kwt: |
|
615 | 615 | kwt.restrict = restrict |
|
616 | 616 | |
|
617 | 617 | def kwweb_skip(orig, web, req, tmpl): |
|
618 | 618 | '''Wraps webcommands.x turning off keyword expansion.''' |
|
619 | 619 | kwt = getattr(web.repo, '_keywordkwt', None) |
|
620 | 620 | if kwt: |
|
621 | 621 | origmatch = kwt.match |
|
622 | 622 | kwt.match = util.never |
|
623 | 623 | try: |
|
624 | for chunk in orig(web, req, tmpl): | |

624 | res = orig(web, req, tmpl) | |
|
625 | if res is web.res: | |
|
626 | res = res.sendresponse() | |
|
627 | for chunk in res: | |
|
625 | 628 | yield chunk |
|
626 | 629 | finally: |
|
627 | 630 | if kwt: |
|
628 | 631 | kwt.match = origmatch |
|
629 | 632 | |
|
630 | 633 | def kw_amend(orig, ui, repo, old, extra, pats, opts): |
|
631 | 634 | '''Wraps cmdutil.amend expanding keywords after amend.''' |
|
632 | 635 | kwt = getattr(repo, '_keywordkwt', None) |
|
633 | 636 | if kwt is None: |
|
634 | 637 | return orig(ui, repo, old, extra, pats, opts) |
|
635 | 638 | with repo.wlock(): |
|
636 | 639 | kwt.postcommit = True |
|
637 | 640 | newid = orig(ui, repo, old, extra, pats, opts) |
|
638 | 641 | if newid != old.node(): |
|
639 | 642 | ctx = repo[newid] |
|
640 | 643 | kwt.restrict = True |
|
641 | 644 | kwt.overwrite(ctx, ctx.files(), False, True) |
|
642 | 645 | kwt.restrict = False |
|
643 | 646 | return newid |
|
644 | 647 | |
|
645 | 648 | def kw_copy(orig, ui, repo, pats, opts, rename=False): |
|
646 | 649 | '''Wraps cmdutil.copy so that copy/rename destinations do not |
|
647 | 650 | contain expanded keywords. |
|
648 | 651 | Note that the source of a regular file destination may also be a |
|
649 | 652 | symlink: |
|
650 | 653 | hg cp sym x -> x is symlink |
|
651 | 654 | cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords) |
|
652 | 655 | For the latter we have to follow the symlink to find out whether its |
|
653 | 656 | target is configured for expansion and we therefore must unexpand the |
|
654 | 657 | keywords in the destination.''' |
|
655 | 658 | kwt = getattr(repo, '_keywordkwt', None) |
|
656 | 659 | if kwt is None: |
|
657 | 660 | return orig(ui, repo, pats, opts, rename) |
|
658 | 661 | with repo.wlock(): |
|
659 | 662 | orig(ui, repo, pats, opts, rename) |
|
660 | 663 | if opts.get('dry_run'): |
|
661 | 664 | return |
|
662 | 665 | wctx = repo[None] |
|
663 | 666 | cwd = repo.getcwd() |
|
664 | 667 | |
|
665 | 668 | def haskwsource(dest): |
|
666 | 669 | '''Returns true if dest is a regular file and configured for |
|
667 | 670 | expansion or a symlink which points to a file configured for |
|
668 | 671 | expansion. ''' |
|
669 | 672 | source = repo.dirstate.copied(dest) |
|
670 | 673 | if 'l' in wctx.flags(source): |
|
671 | 674 | source = pathutil.canonpath(repo.root, cwd, |
|
672 | 675 | os.path.realpath(source)) |
|
673 | 676 | return kwt.match(source) |
|
674 | 677 | |
|
675 | 678 | candidates = [f for f in repo.dirstate.copies() if |
|
676 | 679 | 'l' not in wctx.flags(f) and haskwsource(f)] |
|
677 | 680 | kwt.overwrite(wctx, candidates, False, False) |
|
678 | 681 | |
|
679 | 682 | def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts): |
|
680 | 683 | '''Wraps record.dorecord expanding keywords after recording.''' |
|
681 | 684 | kwt = getattr(repo, '_keywordkwt', None) |
|
682 | 685 | if kwt is None: |
|
683 | 686 | return orig(ui, repo, commitfunc, *pats, **opts) |
|
684 | 687 | with repo.wlock(): |
|
685 | 688 | # record returns 0 even when nothing has changed |
|
686 | 689 | # therefore compare nodes before and after |
|
687 | 690 | kwt.postcommit = True |
|
688 | 691 | ctx = repo['.'] |
|
689 | 692 | wstatus = ctx.status() |
|
690 | 693 | ret = orig(ui, repo, commitfunc, *pats, **opts) |
|
691 | 694 | recctx = repo['.'] |
|
692 | 695 | if ctx != recctx: |
|
693 | 696 | modified, added = _preselect(wstatus, recctx.files()) |
|
694 | 697 | kwt.restrict = False |
|
695 | 698 | kwt.overwrite(recctx, modified, False, True) |
|
696 | 699 | kwt.overwrite(recctx, added, False, True, True) |
|
697 | 700 | kwt.restrict = True |
|
698 | 701 | return ret |
|
699 | 702 | |
|
700 | 703 | def kwfilectx_cmp(orig, self, fctx): |
|
701 | 704 | if fctx._customcmp: |
|
702 | 705 | return fctx.cmp(self) |
|
703 | 706 | kwt = getattr(self._repo, '_keywordkwt', None) |
|
704 | 707 | if kwt is None: |
|
705 | 708 | return orig(self, fctx) |
|
706 | 709 | # keyword affects data size, comparing wdir and filelog size does |
|
707 | 710 | # not make sense |
|
708 | 711 | if (fctx._filenode is None and |
|
709 | 712 | (self._repo._encodefilterpats or |
|
710 | 713 | kwt.match(fctx.path()) and 'l' not in fctx.flags() or |
|
711 | 714 | self.size() - 4 == fctx.size()) or |
|
712 | 715 | self.size() == fctx.size()): |
|
713 | 716 | return self._filelog.cmp(self._filenode, fctx.data()) |
|
714 | 717 | return True |
|
715 | 718 | |
|
716 | 719 | def uisetup(ui): |
|
717 | 720 | ''' Monkeypatches dispatch._parse to retrieve user command. |
|
718 | 721 | Overrides file method to return kwfilelog instead of filelog |
|
719 | 722 | if file matches user configuration. |
|
720 | 723 | Wraps commit to overwrite configured files with updated |
|
721 | 724 | keyword substitutions. |
|
722 | 725 | Monkeypatches patch and webcommands.''' |
|
723 | 726 | |
|
724 | 727 | def kwdispatch_parse(orig, ui, args): |
|
725 | 728 | '''Monkeypatch dispatch._parse to obtain running hg command.''' |
|
726 | 729 | cmd, func, args, options, cmdoptions = orig(ui, args) |
|
727 | 730 | kwtools['hgcmd'] = cmd |
|
728 | 731 | return cmd, func, args, options, cmdoptions |
|
729 | 732 | |
|
730 | 733 | extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse) |
|
731 | 734 | |
|
732 | 735 | extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp) |
|
733 | 736 | extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init) |
|
734 | 737 | extensions.wrapfunction(patch, 'diff', kwdiff) |
|
735 | 738 | extensions.wrapfunction(cmdutil, 'amend', kw_amend) |
|
736 | 739 | extensions.wrapfunction(cmdutil, 'copy', kw_copy) |
|
737 | 740 | extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord) |
|
738 | 741 | for c in nokwwebcommands.split(): |
|
739 | 742 | extensions.wrapfunction(webcommands, c, kwweb_skip) |
|
740 | 743 | |
|
741 | 744 | def reposetup(ui, repo): |
|
742 | 745 | '''Sets up repo as kwrepo for keyword substitution.''' |
|
743 | 746 | |
|
744 | 747 | try: |
|
745 | 748 | if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split() |
|
746 | 749 | or '.hg' in util.splitpath(repo.root) |
|
747 | 750 | or repo._url.startswith('bundle:')): |
|
748 | 751 | return |
|
749 | 752 | except AttributeError: |
|
750 | 753 | pass |
|
751 | 754 | |
|
752 | 755 | inc, exc = [], ['.hg*'] |
|
753 | 756 | for pat, opt in ui.configitems('keyword'): |
|
754 | 757 | if opt != 'ignore': |
|
755 | 758 | inc.append(pat) |
|
756 | 759 | else: |
|
757 | 760 | exc.append(pat) |
|
758 | 761 | if not inc: |
|
759 | 762 | return |
|
760 | 763 | |
|
761 | 764 | kwt = kwtemplater(ui, repo, inc, exc) |
|
762 | 765 | |
|
763 | 766 | class kwrepo(repo.__class__): |
|
764 | 767 | def file(self, f): |
|
765 | 768 | if f[0] == '/': |
|
766 | 769 | f = f[1:] |
|
767 | 770 | return kwfilelog(self.svfs, kwt, f) |
|
768 | 771 | |
|
769 | 772 | def wread(self, filename): |
|
770 | 773 | data = super(kwrepo, self).wread(filename) |
|
771 | 774 | return kwt.wread(filename, data) |
|
772 | 775 | |
|
773 | 776 | def commit(self, *args, **opts): |
|
774 | 777 | # use custom commitctx for user commands |
|
775 | 778 | # other extensions can still wrap repo.commitctx directly |
|
776 | 779 | self.commitctx = self.kwcommitctx |
|
777 | 780 | try: |
|
778 | 781 | return super(kwrepo, self).commit(*args, **opts) |
|
779 | 782 | finally: |
|
780 | 783 | del self.commitctx |
|
781 | 784 | |
|
782 | 785 | def kwcommitctx(self, ctx, error=False): |
|
783 | 786 | n = super(kwrepo, self).commitctx(ctx, error) |
|
784 | 787 | # no lock needed, only called from repo.commit() which already locks |
|
785 | 788 | if not kwt.postcommit: |
|
786 | 789 | restrict = kwt.restrict |
|
787 | 790 | kwt.restrict = True |
|
788 | 791 | kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()), |
|
789 | 792 | False, True) |
|
790 | 793 | kwt.restrict = restrict |
|
791 | 794 | return n |
|
792 | 795 | |
|
793 | 796 | def rollback(self, dryrun=False, force=False): |
|
794 | 797 | with self.wlock(): |
|
795 | 798 | origrestrict = kwt.restrict |
|
796 | 799 | try: |
|
797 | 800 | if not dryrun: |
|
798 | 801 | changed = self['.'].files() |
|
799 | 802 | ret = super(kwrepo, self).rollback(dryrun, force) |
|
800 | 803 | if not dryrun: |
|
801 | 804 | ctx = self['.'] |
|
802 | 805 | modified, added = _preselect(ctx.status(), changed) |
|
803 | 806 | kwt.restrict = False |
|
804 | 807 | kwt.overwrite(ctx, modified, True, True) |
|
805 | 808 | kwt.overwrite(ctx, added, True, False) |
|
806 | 809 | return ret |
|
807 | 810 | finally: |
|
808 | 811 | kwt.restrict = origrestrict |
|
809 | 812 | |
|
810 | 813 | repo.__class__ = kwrepo |
|
811 | 814 | repo._keywordkwt = kwt |
@@ -1,448 +1,458 b'' | |||
|
1 | 1 | # hgweb/hgweb_mod.py - Web interface for a repository. |
|
2 | 2 | # |
|
3 | 3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
4 | 4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
5 | 5 | # |
|
6 | 6 | # This software may be used and distributed according to the terms of the |
|
7 | 7 | # GNU General Public License version 2 or any later version. |
|
8 | 8 | |
|
9 | 9 | from __future__ import absolute_import |
|
10 | 10 | |
|
11 | 11 | import contextlib |
|
12 | 12 | import os |
|
13 | 13 | |
|
14 | 14 | from .common import ( |
|
15 | 15 | ErrorResponse, |
|
16 | 16 | HTTP_BAD_REQUEST, |
|
17 | 17 | HTTP_NOT_FOUND, |
|
18 | 18 | HTTP_NOT_MODIFIED, |
|
19 | 19 | HTTP_OK, |
|
20 | 20 | HTTP_SERVER_ERROR, |
|
21 | 21 | cspvalues, |
|
22 | 22 | permhooks, |
|
23 | 23 | ) |
|
24 | 24 | |
|
25 | 25 | from .. import ( |
|
26 | 26 | encoding, |
|
27 | 27 | error, |
|
28 | 28 | formatter, |
|
29 | 29 | hg, |
|
30 | 30 | hook, |
|
31 | 31 | profiling, |
|
32 | 32 | pycompat, |
|
33 | 33 | repoview, |
|
34 | 34 | templatefilters, |
|
35 | 35 | templater, |
|
36 | 36 | ui as uimod, |
|
37 | 37 | util, |
|
38 | 38 | wireprotoserver, |
|
39 | 39 | ) |
|
40 | 40 | |
|
41 | 41 | from . import ( |
|
42 | 42 | request as requestmod, |
|
43 | 43 | webcommands, |
|
44 | 44 | webutil, |
|
45 | 45 | wsgicgi, |
|
46 | 46 | ) |
|
47 | 47 | |
|
48 | 48 | archivespecs = util.sortdict(( |
|
49 | 49 | ('zip', ('application/zip', 'zip', '.zip', None)), |
|
50 | 50 | ('gz', ('application/x-gzip', 'tgz', '.tar.gz', None)), |
|
51 | 51 | ('bz2', ('application/x-bzip2', 'tbz2', '.tar.bz2', None)), |
|
52 | 52 | )) |
|
53 | 53 | |
|
54 | 54 | def getstyle(req, configfn, templatepath): |
|
55 | 55 | styles = ( |
|
56 | 56 | req.qsparams.get('style', None), |
|
57 | 57 | configfn('web', 'style'), |
|
58 | 58 | 'paper', |
|
59 | 59 | ) |
|
60 | 60 | return styles, templater.stylemap(styles, templatepath) |
|
61 | 61 | |
|
62 | 62 | def makebreadcrumb(url, prefix=''): |
|
63 | 63 | '''Return a 'URL breadcrumb' list |
|
64 | 64 | |
|
65 | 65 | A 'URL breadcrumb' is a list of URL-name pairs, |
|
66 | 66 | corresponding to each of the path items on a URL. |
|
67 | 67 | This can be used to create path navigation entries. |
|
68 | 68 | ''' |
|
69 | 69 | if url.endswith('/'): |
|
70 | 70 | url = url[:-1] |
|
71 | 71 | if prefix: |
|
72 | 72 | url = '/' + prefix + url |
|
73 | 73 | relpath = url |
|
74 | 74 | if relpath.startswith('/'): |
|
75 | 75 | relpath = relpath[1:] |
|
76 | 76 | |
|
77 | 77 | breadcrumb = [] |
|
78 | 78 | urlel = url |
|
79 | 79 | pathitems = [''] + relpath.split('/') |
|
80 | 80 | for pathel in reversed(pathitems): |
|
81 | 81 | if not pathel or not urlel: |
|
82 | 82 | break |
|
83 | 83 | breadcrumb.append({'url': urlel, 'name': pathel}) |
|
84 | 84 | urlel = os.path.dirname(urlel) |
|
85 | 85 | return reversed(breadcrumb) |
|
86 | 86 | |
|
87 | 87 | class requestcontext(object): |
|
88 | 88 | """Holds state/context for an individual request. |
|
89 | 89 | |
|
90 | 90 | Servers can be multi-threaded. Holding state on the WSGI application |
|
91 | 91 | is prone to race conditions. Instances of this class exist to hold |
|
92 | 92 | mutable and race-free state for requests. |
|
93 | 93 | """ |
|
94 | def __init__(self, app, repo): | |
|
94 | def __init__(self, app, repo, req, res): | |
|
95 | 95 | self.repo = repo |
|
96 | 96 | self.reponame = app.reponame |
|
97 | self.req = req | |
|
98 | self.res = res | |
|
97 | 99 | |
|
98 | 100 | self.archivespecs = archivespecs |
|
99 | 101 | |
|
100 | 102 | self.maxchanges = self.configint('web', 'maxchanges') |
|
101 | 103 | self.stripecount = self.configint('web', 'stripes') |
|
102 | 104 | self.maxshortchanges = self.configint('web', 'maxshortchanges') |
|
103 | 105 | self.maxfiles = self.configint('web', 'maxfiles') |
|
104 | 106 | self.allowpull = self.configbool('web', 'allow-pull') |
|
105 | 107 | |
|
106 | 108 | # we use untrusted=False to prevent a repo owner from using |
|
107 | 109 | # web.templates in .hg/hgrc to get access to any file readable |
|
108 | 110 | # by the user running the CGI script |
|
109 | 111 | self.templatepath = self.config('web', 'templates', untrusted=False) |
|
110 | 112 | |
|
111 | 113 | # This object is more expensive to build than simple config values. |
|
112 | 114 | # It is shared across requests. The app will replace the object |
|
113 | 115 | # if it is updated. Since this is a reference and nothing should |
|
114 | 116 | # modify the underlying object, it should be constant for the lifetime |
|
115 | 117 | # of the request. |
|
116 | 118 | self.websubtable = app.websubtable |
|
117 | 119 | |
|
118 | 120 | self.csp, self.nonce = cspvalues(self.repo.ui) |
|
119 | 121 | |
|
120 | 122 | # Trust the settings from the .hg/hgrc files by default. |
|
121 | 123 | def config(self, section, name, default=uimod._unset, untrusted=True): |
|
122 | 124 | return self.repo.ui.config(section, name, default, |
|
123 | 125 | untrusted=untrusted) |
|
124 | 126 | |
|
125 | 127 | def configbool(self, section, name, default=uimod._unset, untrusted=True): |
|
126 | 128 | return self.repo.ui.configbool(section, name, default, |
|
127 | 129 | untrusted=untrusted) |
|
128 | 130 | |
|
129 | 131 | def configint(self, section, name, default=uimod._unset, untrusted=True): |
|
130 | 132 | return self.repo.ui.configint(section, name, default, |
|
131 | 133 | untrusted=untrusted) |
|
132 | 134 | |
|
133 | 135 | def configlist(self, section, name, default=uimod._unset, untrusted=True): |
|
134 | 136 | return self.repo.ui.configlist(section, name, default, |
|
135 | 137 | untrusted=untrusted) |
|
136 | 138 | |
|
137 | 139 | def archivelist(self, nodeid): |
|
138 | 140 | allowed = self.configlist('web', 'allow_archive') |
|
139 | 141 | for typ, spec in self.archivespecs.iteritems(): |
|
140 | 142 | if typ in allowed or self.configbool('web', 'allow%s' % typ): |
|
141 | 143 | yield {'type': typ, 'extension': spec[2], 'node': nodeid} |
|
142 | 144 | |
|
143 | 145 | def templater(self, req): |
|
144 | 146 | # determine scheme, port and server name |
|
145 | 147 | # this is needed to create absolute urls |
|
146 | 148 | logourl = self.config('web', 'logourl') |
|
147 | 149 | logoimg = self.config('web', 'logoimg') |
|
148 | 150 | staticurl = (self.config('web', 'staticurl') |
|
149 | 151 | or req.apppath + '/static/') |
|
150 | 152 | if not staticurl.endswith('/'): |
|
151 | 153 | staticurl += '/' |
|
152 | 154 | |
|
153 | 155 | # some functions for the templater |
|
154 | 156 | |
|
155 | 157 | def motd(**map): |
|
156 | 158 | yield self.config('web', 'motd') |
|
157 | 159 | |
|
158 | 160 | # figure out which style to use |
|
159 | 161 | |
|
160 | 162 | vars = {} |
|
161 | 163 | styles, (style, mapfile) = getstyle(req, self.config, |
|
162 | 164 | self.templatepath) |
|
163 | 165 | if style == styles[0]: |
|
164 | 166 | vars['style'] = style |
|
165 | 167 | |
|
166 | 168 | sessionvars = webutil.sessionvars(vars, '?') |
|
167 | 169 | |
|
168 | 170 | if not self.reponame: |
|
169 | 171 | self.reponame = (self.config('web', 'name', '') |
|
170 | 172 | or req.reponame |
|
171 | 173 | or req.apppath |
|
172 | 174 | or self.repo.root) |
|
173 | 175 | |
|
174 | 176 | def websubfilter(text): |
|
175 | 177 | return templatefilters.websub(text, self.websubtable) |
|
176 | 178 | |
|
177 | 179 | # create the templater |
|
178 | 180 | # TODO: export all keywords: defaults = templatekw.keywords.copy() |
|
179 | 181 | defaults = { |
|
180 | 182 | 'url': req.apppath + '/', |
|
181 | 183 | 'logourl': logourl, |
|
182 | 184 | 'logoimg': logoimg, |
|
183 | 185 | 'staticurl': staticurl, |
|
184 | 186 | 'urlbase': req.advertisedbaseurl, |
|
185 | 187 | 'repo': self.reponame, |
|
186 | 188 | 'encoding': encoding.encoding, |
|
187 | 189 | 'motd': motd, |
|
188 | 190 | 'sessionvars': sessionvars, |
|
189 | 191 | 'pathdef': makebreadcrumb(req.apppath), |
|
190 | 192 | 'style': style, |
|
191 | 193 | 'nonce': self.nonce, |
|
192 | 194 | } |
|
193 | 195 | tres = formatter.templateresources(self.repo.ui, self.repo) |
|
194 | 196 | tmpl = templater.templater.frommapfile(mapfile, |
|
195 | 197 | filters={'websub': websubfilter}, |
|
196 | 198 | defaults=defaults, |
|
197 | 199 | resources=tres) |
|
198 | 200 | return tmpl |
|
199 | 201 | |
|
200 | 202 | |
|
201 | 203 | class hgweb(object): |
|
202 | 204 | """HTTP server for individual repositories. |
|
203 | 205 | |
|
204 | 206 | Instances of this class serve HTTP responses for a particular |
|
205 | 207 | repository. |
|
206 | 208 | |
|
207 | 209 | Instances are typically used as WSGI applications. |
|
208 | 210 | |
|
209 | 211 | Some servers are multi-threaded. On these servers, there may |
|
210 | 212 | be multiple active threads inside __call__. |
|
211 | 213 | """ |
|
212 | 214 | def __init__(self, repo, name=None, baseui=None): |
|
213 | 215 | if isinstance(repo, str): |
|
214 | 216 | if baseui: |
|
215 | 217 | u = baseui.copy() |
|
216 | 218 | else: |
|
217 | 219 | u = uimod.ui.load() |
|
218 | 220 | r = hg.repository(u, repo) |
|
219 | 221 | else: |
|
220 | 222 | # we trust caller to give us a private copy |
|
221 | 223 | r = repo |
|
222 | 224 | |
|
223 | 225 | r.ui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') |
|
224 | 226 | r.baseui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') |
|
225 | 227 | r.ui.setconfig('ui', 'nontty', 'true', 'hgweb') |
|
226 | 228 | r.baseui.setconfig('ui', 'nontty', 'true', 'hgweb') |
|
227 | 229 | # resolve file patterns relative to repo root |
|
228 | 230 | r.ui.setconfig('ui', 'forcecwd', r.root, 'hgweb') |
|
229 | 231 | r.baseui.setconfig('ui', 'forcecwd', r.root, 'hgweb') |
|
230 | 232 | # displaying bundling progress bar while serving feel wrong and may |
|
231 | 233 | # break some wsgi implementation. |
|
232 | 234 | r.ui.setconfig('progress', 'disable', 'true', 'hgweb') |
|
233 | 235 | r.baseui.setconfig('progress', 'disable', 'true', 'hgweb') |
|
234 | 236 | self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))] |
|
235 | 237 | self._lastrepo = self._repos[0] |
|
236 | 238 | hook.redirect(True) |
|
237 | 239 | self.reponame = name |
|
238 | 240 | |
|
239 | 241 | def _webifyrepo(self, repo): |
|
240 | 242 | repo = getwebview(repo) |
|
241 | 243 | self.websubtable = webutil.getwebsubs(repo) |
|
242 | 244 | return repo |
|
243 | 245 | |
|
244 | 246 | @contextlib.contextmanager |
|
245 | 247 | def _obtainrepo(self): |
|
246 | 248 | """Obtain a repo unique to the caller. |
|
247 | 249 | |
|
248 | 250 | Internally we maintain a stack of cachedlocalrepo instances |
|
249 | 251 | to be handed out. If one is available, we pop it and return it, |
|
250 | 252 | ensuring it is up to date in the process. If one is not available, |
|
251 | 253 | we clone the most recently used repo instance and return it. |
|
252 | 254 | |
|
253 | 255 | It is currently possible for the stack to grow without bounds |
|
254 | 256 | if the server allows infinite threads. However, servers should |
|
255 | 257 | have a thread limit, thus establishing our limit. |
|
256 | 258 | """ |
|
257 | 259 | if self._repos: |
|
258 | 260 | cached = self._repos.pop() |
|
259 | 261 | r, created = cached.fetch() |
|
260 | 262 | else: |
|
261 | 263 | cached = self._lastrepo.copy() |
|
262 | 264 | r, created = cached.fetch() |
|
263 | 265 | if created: |
|
264 | 266 | r = self._webifyrepo(r) |
|
265 | 267 | |
|
266 | 268 | self._lastrepo = cached |
|
267 | 269 | self.mtime = cached.mtime |
|
268 | 270 | try: |
|
269 | 271 | yield r |
|
270 | 272 | finally: |
|
271 | 273 | self._repos.append(cached) |
|
272 | 274 | |
|
273 | 275 | def run(self): |
|
274 | 276 | """Start a server from CGI environment. |
|
275 | 277 | |
|
276 | 278 | Modern servers should be using WSGI and should avoid this |
|
277 | 279 | method, if possible. |
|
278 | 280 | """ |
|
279 | 281 | if not encoding.environ.get('GATEWAY_INTERFACE', |
|
280 | 282 | '').startswith("CGI/1."): |
|
281 | 283 | raise RuntimeError("This function is only intended to be " |
|
282 | 284 | "called while running as a CGI script.") |
|
283 | 285 | wsgicgi.launch(self) |
|
284 | 286 | |
|
285 | 287 | def __call__(self, env, respond): |
|
286 | 288 | """Run the WSGI application. |
|
287 | 289 | |
|
288 | 290 | This may be called by multiple threads. |
|
289 | 291 | """ |
|
290 | 292 | req = requestmod.wsgirequest(env, respond) |
|
291 | 293 | return self.run_wsgi(req) |
|
292 | 294 | |
|
293 | 295 | def run_wsgi(self, wsgireq): |
|
294 | 296 | """Internal method to run the WSGI application. |
|
295 | 297 | |
|
296 | 298 | This is typically only called by Mercurial. External consumers |
|
297 | 299 | should be using instances of this class as the WSGI application. |
|
298 | 300 | """ |
|
299 | 301 | with self._obtainrepo() as repo: |
|
300 | 302 | profile = repo.ui.configbool('profiling', 'enabled') |
|
301 | 303 | with profiling.profile(repo.ui, enabled=profile): |
|
302 | 304 | for r in self._runwsgi(wsgireq, repo): |
|
303 | 305 | yield r |
|
304 | 306 | |
|
305 | 307 | def _runwsgi(self, wsgireq, repo): |
|
306 | 308 | req = wsgireq.req |
|
307 | 309 | res = wsgireq.res |
|
308 | rctx = requestcontext(self, repo) | |
|
310 | rctx = requestcontext(self, repo, req, res) | |
|
309 | 311 | |
|
310 | 312 | # This state is global across all threads. |
|
311 | 313 | encoding.encoding = rctx.config('web', 'encoding') |
|
312 | 314 | rctx.repo.ui.environ = wsgireq.env |
|
313 | 315 | |
|
314 | 316 | if rctx.csp: |
|
315 | 317 | # hgwebdir may have added CSP header. Since we generate our own, |
|
316 | 318 | # replace it. |
|
317 | 319 | wsgireq.headers = [h for h in wsgireq.headers |
|
318 | 320 | if h[0] != 'Content-Security-Policy'] |
|
319 | 321 | wsgireq.headers.append(('Content-Security-Policy', rctx.csp)) |
|
320 | 322 | res.headers['Content-Security-Policy'] = rctx.csp |
|
321 | 323 | |
|
322 | 324 | handled = wireprotoserver.handlewsgirequest( |
|
323 | 325 | rctx, wsgireq, req, res, self.check_perm) |
|
324 | 326 | if handled: |
|
325 | 327 | return res.sendresponse() |
|
326 | 328 | |
|
327 | 329 | if req.havepathinfo: |
|
328 | 330 | query = req.dispatchpath |
|
329 | 331 | else: |
|
330 | 332 | query = req.querystring.partition('&')[0].partition(';')[0] |
|
331 | 333 | |
|
332 | 334 | # translate user-visible url structure to internal structure |
|
333 | 335 | |
|
334 | 336 | args = query.split('/', 2) |
|
335 | 337 | if 'cmd' not in req.qsparams and args and args[0]: |
|
336 | 338 | cmd = args.pop(0) |
|
337 | 339 | style = cmd.rfind('-') |
|
338 | 340 | if style != -1: |
|
339 | 341 | req.qsparams['style'] = cmd[:style] |
|
340 | 342 | cmd = cmd[style + 1:] |
|
341 | 343 | |
|
342 | 344 | # avoid accepting e.g. style parameter as command |
|
343 | 345 | if util.safehasattr(webcommands, cmd): |
|
344 | 346 | req.qsparams['cmd'] = cmd |
|
345 | 347 | |
|
346 | 348 | if cmd == 'static': |
|
347 | 349 | req.qsparams['file'] = '/'.join(args) |
|
348 | 350 | else: |
|
349 | 351 | if args and args[0]: |
|
350 | 352 | node = args.pop(0).replace('%2F', '/') |
|
351 | 353 | req.qsparams['node'] = node |
|
352 | 354 | if args: |
|
353 | 355 | if 'file' in req.qsparams: |
|
354 | 356 | del req.qsparams['file'] |
|
355 | 357 | for a in args: |
|
356 | 358 | req.qsparams.add('file', a) |
|
357 | 359 | |
|
358 | 360 | ua = req.headers.get('User-Agent', '') |
|
359 | 361 | if cmd == 'rev' and 'mercurial' in ua: |
|
360 | 362 | req.qsparams['style'] = 'raw' |
|
361 | 363 | |
|
362 | 364 | if cmd == 'archive': |
|
363 | 365 | fn = req.qsparams['node'] |
|
364 | 366 | for type_, spec in rctx.archivespecs.iteritems(): |
|
365 | 367 | ext = spec[2] |
|
366 | 368 | if fn.endswith(ext): |
|
367 | 369 | req.qsparams['node'] = fn[:-len(ext)] |
|
368 | 370 | req.qsparams['type'] = type_ |
|
369 | 371 | else: |
|
370 | 372 | cmd = req.qsparams.get('cmd', '') |
|
371 | 373 | |
|
372 | 374 | # process the web interface request |
|
373 | 375 | |
|
374 | 376 | try: |
|
375 | 377 | tmpl = rctx.templater(req) |
|
376 | 378 | ctype = tmpl('mimetype', encoding=encoding.encoding) |
|
377 | 379 | ctype = templater.stringify(ctype) |
|
378 | 380 | |
|
379 | 381 | # check read permissions non-static content |
|
380 | 382 | if cmd != 'static': |
|
381 | 383 | self.check_perm(rctx, wsgireq, None) |
|
382 | 384 | |
|
383 | 385 | if cmd == '': |
|
384 | 386 | req.qsparams['cmd'] = tmpl.cache['default'] |
|
385 | 387 | cmd = req.qsparams['cmd'] |
|
386 | 388 | |
|
387 | 389 | # Don't enable caching if using a CSP nonce because then it wouldn't |
|
388 | 390 | # be a nonce. |
|
389 | 391 | if rctx.configbool('web', 'cache') and not rctx.nonce: |
|
390 | 392 | tag = 'W/"%d"' % self.mtime |
|
391 | 393 | if req.headers.get('If-None-Match') == tag: |
|
392 | 394 | raise ErrorResponse(HTTP_NOT_MODIFIED) |
|
393 | 395 | |
|
394 | 396 | wsgireq.headers.append((r'ETag', pycompat.sysstr(tag))) |
|
395 | 397 | res.headers['ETag'] = tag |
|
396 | 398 | |
|
397 | 399 | if cmd not in webcommands.__all__: |
|
398 | 400 | msg = 'no such method: %s' % cmd |
|
399 | 401 | raise ErrorResponse(HTTP_BAD_REQUEST, msg) |
|
400 | 402 | elif cmd == 'file' and req.qsparams.get('style') == 'raw': |
|
401 | 403 | rctx.ctype = ctype |
|
402 | 404 | content = webcommands.rawfile(rctx, wsgireq, tmpl) |
|
403 | 405 | else: |
|
406 | # Set some globals appropriate for web handlers. Commands can | |
|
407 | # override easily enough. | |
|
408 | res.status = '200 Script output follows' | |
|
409 | res.headers['Content-Type'] = ctype | |
|
404 | 410 | content = getattr(webcommands, cmd)(rctx, wsgireq, tmpl) |
|
411 | ||
|
412 | if content is res: | |
|
413 | return res.sendresponse() | |
|
414 | ||
|
405 | 415 | wsgireq.respond(HTTP_OK, ctype) |
|
406 | 416 | |
|
407 | 417 | return content |
|
408 | 418 | |
|
409 | 419 | except (error.LookupError, error.RepoLookupError) as err: |
|
410 | 420 | wsgireq.respond(HTTP_NOT_FOUND, ctype) |
|
411 | 421 | msg = pycompat.bytestr(err) |
|
412 | 422 | if (util.safehasattr(err, 'name') and |
|
413 | 423 | not isinstance(err, error.ManifestLookupError)): |
|
414 | 424 | msg = 'revision not found: %s' % err.name |
|
415 | 425 | return tmpl('error', error=msg) |
|
416 | 426 | except (error.RepoError, error.RevlogError) as inst: |
|
417 | 427 | wsgireq.respond(HTTP_SERVER_ERROR, ctype) |
|
418 | 428 | return tmpl('error', error=pycompat.bytestr(inst)) |
|
419 | 429 | except ErrorResponse as inst: |
|
420 | 430 | wsgireq.respond(inst, ctype) |
|
421 | 431 | if inst.code == HTTP_NOT_MODIFIED: |
|
422 | 432 | # Not allowed to return a body on a 304 |
|
423 | 433 | return [''] |
|
424 | 434 | return tmpl('error', error=pycompat.bytestr(inst)) |
|
425 | 435 | |
|
426 | 436 | def check_perm(self, rctx, req, op): |
|
427 | 437 | for permhook in permhooks: |
|
428 | 438 | permhook(rctx, req, op) |
|
429 | 439 | |
|
430 | 440 | def getwebview(repo): |
|
431 | 441 | """The 'web.view' config controls changeset filter to hgweb. Possible |
|
432 | 442 | values are ``served``, ``visible`` and ``all``. Default is ``served``. |
|
433 | 443 | The ``served`` filter only shows changesets that can be pulled from the |
|
434 | 444 | hgweb instance. The``visible`` filter includes secret changesets but |
|
435 | 445 | still excludes "hidden" one. |
|
436 | 446 | |
|
437 | 447 | See the repoview module for details. |
|
438 | 448 | |
|
439 | 449 | The option has been around undocumented since Mercurial 2.5, but no |
|
440 | 450 | user ever asked about it. So we better keep it undocumented for now.""" |
|
441 | 451 | # experimental config: web.view |
|
442 | 452 | viewconfig = repo.ui.config('web', 'view', untrusted=True) |
|
443 | 453 | if viewconfig == 'all': |
|
444 | 454 | return repo.unfiltered() |
|
445 | 455 | elif viewconfig in repoview.filtertable: |
|
446 | 456 | return repo.filtered(viewconfig) |
|
447 | 457 | else: |
|
448 | 458 | return repo.filtered('served') |
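
``getwebview`` maps the experimental ``web.view`` setting onto a repository filter. A standalone sketch of the same selection logic; the filter names are written out here only as an assumed stand-in for ``repoview.filtertable``::

    KNOWN_FILTERS = {'served', 'visible', 'immutable', 'base'}  # illustrative subset

    def pickview(viewconfig):
        # 'all' disables filtering entirely; unknown or unset values fall
        # back to the safe 'served' default, mirroring getwebview() above
        if viewconfig == 'all':
            return None                  # caller would use repo.unfiltered()
        if viewconfig in KNOWN_FILTERS:
            return viewconfig            # caller would use repo.filtered(viewconfig)
        return 'served'

    for value in ('all', 'visible', 'bogus'):
        print(pickview(value))           # None, visible, served
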
@@ -1,1411 +1,1424 b'' | |||
|
1 | 1 | # |
|
2 | 2 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
3 | 3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | import copy |
|
11 | 11 | import mimetypes |
|
12 | 12 | import os |
|
13 | 13 | import re |
|
14 | 14 | |
|
15 | 15 | from ..i18n import _ |
|
16 | 16 | from ..node import hex, nullid, short |
|
17 | 17 | |
|
18 | 18 | from .common import ( |
|
19 | 19 | ErrorResponse, |
|
20 | 20 | HTTP_FORBIDDEN, |
|
21 | 21 | HTTP_NOT_FOUND, |
|
22 | 22 | HTTP_OK, |
|
23 | 23 | get_contact, |
|
24 | 24 | paritygen, |
|
25 | 25 | staticfile, |
|
26 | 26 | ) |
|
27 | 27 | |
|
28 | 28 | from .. import ( |
|
29 | 29 | archival, |
|
30 | 30 | dagop, |
|
31 | 31 | encoding, |
|
32 | 32 | error, |
|
33 | 33 | graphmod, |
|
34 | 34 | pycompat, |
|
35 | 35 | revset, |
|
36 | 36 | revsetlang, |
|
37 | 37 | scmutil, |
|
38 | 38 | smartset, |
|
39 | 39 | templater, |
|
40 | 40 | util, |
|
41 | 41 | ) |
|
42 | 42 | |
|
43 | 43 | from . import ( |
|
44 | 44 | webutil, |
|
45 | 45 | ) |
|
46 | 46 | |
|
47 | 47 | __all__ = [] |
|
48 | 48 | commands = {} |
|
49 | 49 | |
|
50 | 50 | class webcommand(object): |
|
51 | 51 | """Decorator used to register a web command handler. |
|
52 | 52 | |
|
53 | 53 | The decorator takes as its positional arguments the name/path the |
|
54 | 54 | command should be accessible under. |
|
55 | 55 | |
|
56 | When called, functions receive as arguments a ``requestcontext``, | |
|
57 | ``wsgirequest``, and a templater instance for generating output. | |
|
58 | The functions should populate the ``rctx.res`` object with details | |
|
59 | about the HTTP response. | |
|
60 | ||
|
61 | The function can return the ``requestcontext.res`` instance to signal | |
|
62 | that it wants to use this object to generate the response. If an iterable | |
|
63 | is returned, the ``wsgirequest`` instance will be used and the returned | |
|
64 | content will constitute the response body. | |
|
65 | ||
|
56 | 66 | Usage: |
|
57 | 67 | |
|
58 | 68 | @webcommand('mycommand') |
|
59 | 69 | def mycommand(web, req, tmpl): |
|
60 | 70 | pass |
|
61 | 71 | """ |
|
62 | 72 | |
|
63 | 73 | def __init__(self, name): |
|
64 | 74 | self.name = name |
|
65 | 75 | |
|
66 | 76 | def __call__(self, func): |
|
67 | 77 | __all__.append(self.name) |
|
68 | 78 | commands[self.name] = func |
|
69 | 79 | return func |
|
70 | 80 | |
|
71 | 81 | @webcommand('log') |
|
72 | 82 | def log(web, req, tmpl): |
|
73 | 83 | """ |
|
74 | 84 | /log[/{revision}[/{path}]] |
|
75 | 85 | -------------------------- |
|
76 | 86 | |
|
77 | 87 | Show repository or file history. |
|
78 | 88 | |
|
79 | 89 | For URLs of the form ``/log/{revision}``, a list of changesets starting at |
|
80 | 90 | the specified changeset identifier is shown. If ``{revision}`` is not |
|
81 | 91 | defined, the default is ``tip``. This form is equivalent to the |
|
82 | 92 | ``changelog`` handler. |
|
83 | 93 | |
|
84 | 94 | For URLs of the form ``/log/{revision}/{file}``, the history for a specific |
|
85 | 95 | file will be shown. This form is equivalent to the ``filelog`` handler. |
|
86 | 96 | """ |
|
87 | 97 | |
|
88 | 98 | if req.req.qsparams.get('file'): |
|
89 | 99 | return filelog(web, req, tmpl) |
|
90 | 100 | else: |
|
91 | 101 | return changelog(web, req, tmpl) |
|
92 | 102 | |
|
93 | 103 | @webcommand('rawfile') |
|
94 | 104 | def rawfile(web, req, tmpl): |
|
95 | 105 | guessmime = web.configbool('web', 'guessmime') |
|
96 | 106 | |
|
97 | 107 | path = webutil.cleanpath(web.repo, req.req.qsparams.get('file', '')) |
|
98 | 108 | if not path: |
|
99 | 109 | content = manifest(web, req, tmpl) |
|
100 | 110 | req.respond(HTTP_OK, web.ctype) |
|
101 | 111 | return content |
|
102 | 112 | |
|
103 | 113 | try: |
|
104 | 114 | fctx = webutil.filectx(web.repo, req) |
|
105 | 115 | except error.LookupError as inst: |
|
106 | 116 | try: |
|
107 | 117 | content = manifest(web, req, tmpl) |
|
108 | 118 | req.respond(HTTP_OK, web.ctype) |
|
109 | 119 | return content |
|
110 | 120 | except ErrorResponse: |
|
111 | 121 | raise inst |
|
112 | 122 | |
|
113 | 123 | path = fctx.path() |
|
114 | 124 | text = fctx.data() |
|
115 | 125 | mt = 'application/binary' |
|
116 | 126 | if guessmime: |
|
117 | 127 | mt = mimetypes.guess_type(path)[0] |
|
118 | 128 | if mt is None: |
|
119 | 129 | if util.binary(text): |
|
120 | 130 | mt = 'application/binary' |
|
121 | 131 | else: |
|
122 | 132 | mt = 'text/plain' |
|
123 | 133 | if mt.startswith('text/'): |
|
124 | 134 | mt += '; charset="%s"' % encoding.encoding |
|
125 | 135 | |
|
126 | 136 | req.respond(HTTP_OK, mt, path, body=text) |
|
127 | 137 | return [] |
|
128 | 138 | |
|
129 | 139 | def _filerevision(web, req, tmpl, fctx): |
|
130 | 140 | f = fctx.path() |
|
131 | 141 | text = fctx.data() |
|
132 | 142 | parity = paritygen(web.stripecount) |
|
133 | 143 | ishead = fctx.filerev() in fctx.filelog().headrevs() |
|
134 | 144 | |
|
135 | 145 | if util.binary(text): |
|
136 | 146 | mt = mimetypes.guess_type(f)[0] or 'application/octet-stream' |
|
137 | 147 | text = '(binary:%s)' % mt |
|
138 | 148 | |
|
139 | 149 | def lines(): |
|
140 | 150 | for lineno, t in enumerate(text.splitlines(True)): |
|
141 | 151 | yield {"line": t, |
|
142 | 152 | "lineid": "l%d" % (lineno + 1), |
|
143 | 153 | "linenumber": "% 6d" % (lineno + 1), |
|
144 | 154 | "parity": next(parity)} |
|
145 | 155 | |
|
146 | 156 | return tmpl("filerevision", |
|
147 | 157 | file=f, |
|
148 | 158 | path=webutil.up(f), |
|
149 | 159 | text=lines(), |
|
150 | 160 | symrev=webutil.symrevorshortnode(req, fctx), |
|
151 | 161 | rename=webutil.renamelink(fctx), |
|
152 | 162 | permissions=fctx.manifest().flags(f), |
|
153 | 163 | ishead=int(ishead), |
|
154 | 164 | **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))) |
|
155 | 165 | |
|
156 | 166 | @webcommand('file') |
|
157 | 167 | def file(web, req, tmpl): |
|
158 | 168 | """ |
|
159 | 169 | /file/{revision}[/{path}] |
|
160 | 170 | ------------------------- |
|
161 | 171 | |
|
162 | 172 | Show information about a directory or file in the repository. |
|
163 | 173 | |
|
164 | 174 | Info about the ``path`` given as a URL parameter will be rendered. |
|
165 | 175 | |
|
166 | 176 | If ``path`` is a directory, information about the entries in that |
|
167 | 177 | directory will be rendered. This form is equivalent to the ``manifest`` |
|
168 | 178 | handler. |
|
169 | 179 | |
|
170 | 180 | If ``path`` is a file, information about that file will be shown via |
|
171 | 181 | the ``filerevision`` template. |
|
172 | 182 | |
|
173 | 183 | If ``path`` is not defined, information about the root directory will |
|
174 | 184 | be rendered. |
|
175 | 185 | """ |
|
176 | 186 | path = webutil.cleanpath(web.repo, req.req.qsparams.get('file', '')) |
|
177 | 187 | if not path: |
|
178 | 188 | return manifest(web, req, tmpl) |
|
179 | 189 | try: |
|
180 | 190 | return _filerevision(web, req, tmpl, webutil.filectx(web.repo, req)) |
|
181 | 191 | except error.LookupError as inst: |
|
182 | 192 | try: |
|
183 | 193 | return manifest(web, req, tmpl) |
|
184 | 194 | except ErrorResponse: |
|
185 | 195 | raise inst |
|
186 | 196 | |
|
187 | 197 | def _search(web, req, tmpl): |
|
188 | 198 | MODE_REVISION = 'rev' |
|
189 | 199 | MODE_KEYWORD = 'keyword' |
|
190 | 200 | MODE_REVSET = 'revset' |
|
191 | 201 | |
|
192 | 202 | def revsearch(ctx): |
|
193 | 203 | yield ctx |
|
194 | 204 | |
|
195 | 205 | def keywordsearch(query): |
|
196 | 206 | lower = encoding.lower |
|
197 | 207 | qw = lower(query).split() |
|
198 | 208 | |
|
199 | 209 | def revgen(): |
|
200 | 210 | cl = web.repo.changelog |
|
201 | 211 | for i in xrange(len(web.repo) - 1, 0, -100): |
|
202 | 212 | l = [] |
|
203 | 213 | for j in cl.revs(max(0, i - 99), i): |
|
204 | 214 | ctx = web.repo[j] |
|
205 | 215 | l.append(ctx) |
|
206 | 216 | l.reverse() |
|
207 | 217 | for e in l: |
|
208 | 218 | yield e |
|
209 | 219 | |
|
210 | 220 | for ctx in revgen(): |
|
211 | 221 | miss = 0 |
|
212 | 222 | for q in qw: |
|
213 | 223 | if not (q in lower(ctx.user()) or |
|
214 | 224 | q in lower(ctx.description()) or |
|
215 | 225 | q in lower(" ".join(ctx.files()))): |
|
216 | 226 | miss = 1 |
|
217 | 227 | break |
|
218 | 228 | if miss: |
|
219 | 229 | continue |
|
220 | 230 | |
|
221 | 231 | yield ctx |
|
222 | 232 | |
|
223 | 233 | def revsetsearch(revs): |
|
224 | 234 | for r in revs: |
|
225 | 235 | yield web.repo[r] |
|
226 | 236 | |
|
227 | 237 | searchfuncs = { |
|
228 | 238 | MODE_REVISION: (revsearch, 'exact revision search'), |
|
229 | 239 | MODE_KEYWORD: (keywordsearch, 'literal keyword search'), |
|
230 | 240 | MODE_REVSET: (revsetsearch, 'revset expression search'), |
|
231 | 241 | } |
|
232 | 242 | |
|
233 | 243 | def getsearchmode(query): |
|
234 | 244 | try: |
|
235 | 245 | ctx = web.repo[query] |
|
236 | 246 | except (error.RepoError, error.LookupError): |
|
237 | 247 | # query is not an exact revision pointer, need to |
|
238 | 248 | # decide if it's a revset expression or keywords |
|
239 | 249 | pass |
|
240 | 250 | else: |
|
241 | 251 | return MODE_REVISION, ctx |
|
242 | 252 | |
|
243 | 253 | revdef = 'reverse(%s)' % query |
|
244 | 254 | try: |
|
245 | 255 | tree = revsetlang.parse(revdef) |
|
246 | 256 | except error.ParseError: |
|
247 | 257 | # can't parse to a revset tree |
|
248 | 258 | return MODE_KEYWORD, query |
|
249 | 259 | |
|
250 | 260 | if revsetlang.depth(tree) <= 2: |
|
251 | 261 | # no revset syntax used |
|
252 | 262 | return MODE_KEYWORD, query |
|
253 | 263 | |
|
254 | 264 | if any((token, (value or '')[:3]) == ('string', 're:') |
|
255 | 265 | for token, value, pos in revsetlang.tokenize(revdef)): |
|
256 | 266 | return MODE_KEYWORD, query |
|
257 | 267 | |
|
258 | 268 | funcsused = revsetlang.funcsused(tree) |
|
259 | 269 | if not funcsused.issubset(revset.safesymbols): |
|
260 | 270 | return MODE_KEYWORD, query |
|
261 | 271 | |
|
262 | 272 | mfunc = revset.match(web.repo.ui, revdef, repo=web.repo) |
|
263 | 273 | try: |
|
264 | 274 | revs = mfunc(web.repo) |
|
265 | 275 | return MODE_REVSET, revs |
|
266 | 276 | # ParseError: wrongly placed tokens, wrongs arguments, etc |
|
267 | 277 | # RepoLookupError: no such revision, e.g. in 'revision:' |
|
268 | 278 | # Abort: bookmark/tag not exists |
|
269 | 279 | # LookupError: ambiguous identifier, e.g. in '(bc)' on a large repo |
|
270 | 280 | except (error.ParseError, error.RepoLookupError, error.Abort, |
|
271 | 281 | LookupError): |
|
272 | 282 | return MODE_KEYWORD, query |
|
273 | 283 | |
|
274 | 284 | def changelist(**map): |
|
275 | 285 | count = 0 |
|
276 | 286 | |
|
277 | 287 | for ctx in searchfunc[0](funcarg): |
|
278 | 288 | count += 1 |
|
279 | 289 | n = ctx.node() |
|
280 | 290 | showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n) |
|
281 | 291 | files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles) |
|
282 | 292 | |
|
283 | 293 | yield tmpl('searchentry', |
|
284 | 294 | parity=next(parity), |
|
285 | 295 | changelogtag=showtags, |
|
286 | 296 | files=files, |
|
287 | 297 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) |
|
288 | 298 | |
|
289 | 299 | if count >= revcount: |
|
290 | 300 | break |
|
291 | 301 | |
|
292 | 302 | query = req.req.qsparams['rev'] |
|
293 | 303 | revcount = web.maxchanges |
|
294 | 304 | if 'revcount' in req.req.qsparams: |
|
295 | 305 | try: |
|
296 | 306 | revcount = int(req.req.qsparams.get('revcount', revcount)) |
|
297 | 307 | revcount = max(revcount, 1) |
|
298 | 308 | tmpl.defaults['sessionvars']['revcount'] = revcount |
|
299 | 309 | except ValueError: |
|
300 | 310 | pass |
|
301 | 311 | |
|
302 | 312 | lessvars = copy.copy(tmpl.defaults['sessionvars']) |
|
303 | 313 | lessvars['revcount'] = max(revcount // 2, 1) |
|
304 | 314 | lessvars['rev'] = query |
|
305 | 315 | morevars = copy.copy(tmpl.defaults['sessionvars']) |
|
306 | 316 | morevars['revcount'] = revcount * 2 |
|
307 | 317 | morevars['rev'] = query |
|
308 | 318 | |
|
309 | 319 | mode, funcarg = getsearchmode(query) |
|
310 | 320 | |
|
311 | 321 | if 'forcekw' in req.req.qsparams: |
|
312 | 322 | showforcekw = '' |
|
313 | 323 | showunforcekw = searchfuncs[mode][1] |
|
314 | 324 | mode = MODE_KEYWORD |
|
315 | 325 | funcarg = query |
|
316 | 326 | else: |
|
317 | 327 | if mode != MODE_KEYWORD: |
|
318 | 328 | showforcekw = searchfuncs[MODE_KEYWORD][1] |
|
319 | 329 | else: |
|
320 | 330 | showforcekw = '' |
|
321 | 331 | showunforcekw = '' |
|
322 | 332 | |
|
323 | 333 | searchfunc = searchfuncs[mode] |
|
324 | 334 | |
|
325 | 335 | tip = web.repo['tip'] |
|
326 | 336 | parity = paritygen(web.stripecount) |
|
327 | 337 | |
|
328 | 338 | return tmpl('search', query=query, node=tip.hex(), symrev='tip', |
|
329 | 339 | entries=changelist, archives=web.archivelist("tip"), |
|
330 | 340 | morevars=morevars, lessvars=lessvars, |
|
331 | 341 | modedesc=searchfunc[1], |
|
332 | 342 | showforcekw=showforcekw, showunforcekw=showunforcekw) |
|
333 | 343 | |
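
``getsearchmode`` in ``_search`` decides between three strategies: an exact revision, a revset expression, or a plain keyword search. The sketch below keeps only the shape of that decision ladder; the repository lookup and the revset parser are injected as stand-ins, since the real code relies on ``revsetlang`` and a live repo::

    MODE_REVISION, MODE_KEYWORD, MODE_REVSET = 'rev', 'keyword', 'revset'

    def getsearchmode(query, knownrevs, usesrevsetsyntax):
        # 1. an exact changeset identifier wins outright
        if query in knownrevs:
            return MODE_REVISION
        # 2. a query that parses as genuine revset syntax becomes a revset
        #    search; parse errors and trivial trees fall through
        try:
            if usesrevsetsyntax(query):
                return MODE_REVSET
        except ValueError:
            pass
        # 3. everything else is a literal keyword search
        return MODE_KEYWORD

    # toy stand-in: treat function calls and ranges as revset syntax
    toyparse = lambda q: any(c in q for c in '():')

    print(getsearchmode('tip', {'tip'}, toyparse))             # rev
    print(getsearchmode('ancestors(tip)', set(), toyparse))    # revset
    print(getsearchmode('fix encoding bug', set(), toyparse))  # keyword
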
|
334 | 344 | @webcommand('changelog') |
|
335 | 345 | def changelog(web, req, tmpl, shortlog=False): |
|
336 | 346 | """ |
|
337 | 347 | /changelog[/{revision}] |
|
338 | 348 | ----------------------- |
|
339 | 349 | |
|
340 | 350 | Show information about multiple changesets. |
|
341 | 351 | |
|
342 | 352 | If the optional ``revision`` URL argument is absent, information about |
|
343 | 353 | all changesets starting at ``tip`` will be rendered. If the ``revision`` |
|
344 | 354 | argument is present, changesets will be shown starting from the specified |
|
345 | 355 | revision. |
|
346 | 356 | |
|
347 | 357 | If ``revision`` is absent, the ``rev`` query string argument may be |
|
348 | 358 | defined. This will perform a search for changesets. |
|
349 | 359 | |
|
350 | 360 | The argument for ``rev`` can be a single revision, a revision set, |
|
351 | 361 | or a literal keyword to search for in changeset data (equivalent to |
|
352 | 362 | :hg:`log -k`). |
|
353 | 363 | |
|
354 | 364 | The ``revcount`` query string argument defines the maximum numbers of |
|
355 | 365 | changesets to render. |
|
356 | 366 | |
|
357 | 367 | For non-searches, the ``changelog`` template will be rendered. |
|
358 | 368 | """ |
|
359 | 369 | |
|
360 | 370 | query = '' |
|
361 | 371 | if 'node' in req.req.qsparams: |
|
362 | 372 | ctx = webutil.changectx(web.repo, req) |
|
363 | 373 | symrev = webutil.symrevorshortnode(req, ctx) |
|
364 | 374 | elif 'rev' in req.req.qsparams: |
|
365 | 375 | return _search(web, req, tmpl) |
|
366 | 376 | else: |
|
367 | 377 | ctx = web.repo['tip'] |
|
368 | 378 | symrev = 'tip' |
|
369 | 379 | |
|
370 | 380 | def changelist(): |
|
371 | 381 | revs = [] |
|
372 | 382 | if pos != -1: |
|
373 | 383 | revs = web.repo.changelog.revs(pos, 0) |
|
374 | 384 | curcount = 0 |
|
375 | 385 | for rev in revs: |
|
376 | 386 | curcount += 1 |
|
377 | 387 | if curcount > revcount + 1: |
|
378 | 388 | break |
|
379 | 389 | |
|
380 | 390 | entry = webutil.changelistentry(web, web.repo[rev], tmpl) |
|
381 | 391 | entry['parity'] = next(parity) |
|
382 | 392 | yield entry |
|
383 | 393 | |
|
384 | 394 | if shortlog: |
|
385 | 395 | revcount = web.maxshortchanges |
|
386 | 396 | else: |
|
387 | 397 | revcount = web.maxchanges |
|
388 | 398 | |
|
389 | 399 | if 'revcount' in req.req.qsparams: |
|
390 | 400 | try: |
|
391 | 401 | revcount = int(req.req.qsparams.get('revcount', revcount)) |
|
392 | 402 | revcount = max(revcount, 1) |
|
393 | 403 | tmpl.defaults['sessionvars']['revcount'] = revcount |
|
394 | 404 | except ValueError: |
|
395 | 405 | pass |
|
396 | 406 | |
|
397 | 407 | lessvars = copy.copy(tmpl.defaults['sessionvars']) |
|
398 | 408 | lessvars['revcount'] = max(revcount // 2, 1) |
|
399 | 409 | morevars = copy.copy(tmpl.defaults['sessionvars']) |
|
400 | 410 | morevars['revcount'] = revcount * 2 |
|
401 | 411 | |
|
402 | 412 | count = len(web.repo) |
|
403 | 413 | pos = ctx.rev() |
|
404 | 414 | parity = paritygen(web.stripecount) |
|
405 | 415 | |
|
406 | 416 | changenav = webutil.revnav(web.repo).gen(pos, revcount, count) |
|
407 | 417 | |
|
408 | 418 | entries = list(changelist()) |
|
409 | 419 | latestentry = entries[:1] |
|
410 | 420 | if len(entries) > revcount: |
|
411 | 421 | nextentry = entries[-1:] |
|
412 | 422 | entries = entries[:-1] |
|
413 | 423 | else: |
|
414 | 424 | nextentry = [] |
|
415 | 425 | |
|
416 | 426 | return tmpl('shortlog' if shortlog else 'changelog', changenav=changenav, |
|
417 | 427 | node=ctx.hex(), rev=pos, symrev=symrev, changesets=count, |
|
418 | 428 | entries=entries, |
|
419 | 429 | latestentry=latestentry, nextentry=nextentry, |
|
420 | 430 | archives=web.archivelist("tip"), revcount=revcount, |
|
421 | 431 | morevars=morevars, lessvars=lessvars, query=query) |
|
422 | 432 | |
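
``changelist`` above pulls one entry more than the page size so the template knows whether to offer a "next" link, then splits that extra entry off. The same bookkeeping as a standalone helper::

    def paginate(revs, revcount):
        entries = []
        for i, rev in enumerate(revs):
            if i > revcount:             # stop after revcount + 1 entries
                break
            entries.append(rev)
        latestentry = entries[:1]
        if len(entries) > revcount:      # extra entry present -> next page exists
            nextentry, entries = entries[-1:], entries[:-1]
        else:
            nextentry = []
        return entries, latestentry, nextentry

    entries, latest, nxt = paginate(range(100, 0, -1), 10)
    print(len(entries))   # 10
    print(latest)         # [100]
    print(nxt)            # [90]
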
|
423 | 433 | @webcommand('shortlog') |
|
424 | 434 | def shortlog(web, req, tmpl): |
|
425 | 435 | """ |
|
426 | 436 | /shortlog |
|
427 | 437 | --------- |
|
428 | 438 | |
|
429 | 439 | Show basic information about a set of changesets. |
|
430 | 440 | |
|
431 | 441 | This accepts the same parameters as the ``changelog`` handler. The only |
|
432 | 442 | difference is the ``shortlog`` template will be rendered instead of the |
|
433 | 443 | ``changelog`` template. |
|
434 | 444 | """ |
|
435 | 445 | return changelog(web, req, tmpl, shortlog=True) |
|
436 | 446 | |
|
437 | 447 | @webcommand('changeset') |
|
438 | 448 | def changeset(web, req, tmpl): |
|
439 | 449 | """ |
|
440 | 450 | /changeset[/{revision}] |
|
441 | 451 | ----------------------- |
|
442 | 452 | |
|
443 | 453 | Show information about a single changeset. |
|
444 | 454 | |
|
445 | 455 | A URL path argument is the changeset identifier to show. See ``hg help |
|
446 | 456 | revisions`` for possible values. If not defined, the ``tip`` changeset |
|
447 | 457 | will be shown. |
|
448 | 458 | |
|
449 | 459 | The ``changeset`` template is rendered. Contents of the ``changesettag``, |
|
450 | 460 | ``changesetbookmark``, ``filenodelink``, ``filenolink``, and the many |
|
451 | 461 | templates related to diffs may all be used to produce the output. |
|
452 | 462 | """ |
|
453 | 463 | ctx = webutil.changectx(web.repo, req) |
|
454 | 464 | |
|
455 | 465 | return tmpl('changeset', **webutil.changesetentry(web, req, tmpl, ctx)) |
|
456 | 466 | |
|
457 | 467 | rev = webcommand('rev')(changeset) |
|
458 | 468 | |
|
459 | 469 | def decodepath(path): |
|
460 | 470 | """Hook for mapping a path in the repository to a path in the |
|
461 | 471 | working copy. |
|
462 | 472 | |
|
463 | 473 | Extensions (e.g., largefiles) can override this to remap files in |
|
464 | 474 | the virtual file system presented by the manifest command below.""" |
|
465 | 475 | return path |
|
466 | 476 | |
|
467 | 477 | @webcommand('manifest') |
|
468 | 478 | def manifest(web, req, tmpl): |
|
469 | 479 | """ |
|
470 | 480 | /manifest[/{revision}[/{path}]] |
|
471 | 481 | ------------------------------- |
|
472 | 482 | |
|
473 | 483 | Show information about a directory. |
|
474 | 484 | |
|
475 | 485 | If the URL path arguments are omitted, information about the root |
|
476 | 486 | directory for the ``tip`` changeset will be shown. |
|
477 | 487 | |
|
478 | 488 | Because this handler can only show information for directories, it |
|
479 | 489 | is recommended to use the ``file`` handler instead, as it can handle both |
|
480 | 490 | directories and files. |
|
481 | 491 | |
|
482 | 492 | The ``manifest`` template will be rendered for this handler. |
|
483 | 493 | """ |
|
484 | 494 | if 'node' in req.req.qsparams: |
|
485 | 495 | ctx = webutil.changectx(web.repo, req) |
|
486 | 496 | symrev = webutil.symrevorshortnode(req, ctx) |
|
487 | 497 | else: |
|
488 | 498 | ctx = web.repo['tip'] |
|
489 | 499 | symrev = 'tip' |
|
490 | 500 | path = webutil.cleanpath(web.repo, req.req.qsparams.get('file', '')) |
|
491 | 501 | mf = ctx.manifest() |
|
492 | 502 | node = ctx.node() |
|
493 | 503 | |
|
494 | 504 | files = {} |
|
495 | 505 | dirs = {} |
|
496 | 506 | parity = paritygen(web.stripecount) |
|
497 | 507 | |
|
498 | 508 | if path and path[-1:] != "/": |
|
499 | 509 | path += "/" |
|
500 | 510 | l = len(path) |
|
501 | 511 | abspath = "/" + path |
|
502 | 512 | |
|
503 | 513 | for full, n in mf.iteritems(): |
|
504 | 514 | # the virtual path (working copy path) used for the full |
|
505 | 515 | # (repository) path |
|
506 | 516 | f = decodepath(full) |
|
507 | 517 | |
|
508 | 518 | if f[:l] != path: |
|
509 | 519 | continue |
|
510 | 520 | remain = f[l:] |
|
511 | 521 | elements = remain.split('/') |
|
512 | 522 | if len(elements) == 1: |
|
513 | 523 | files[remain] = full |
|
514 | 524 | else: |
|
515 | 525 | h = dirs # need to retain ref to dirs (root) |
|
516 | 526 | for elem in elements[0:-1]: |
|
517 | 527 | if elem not in h: |
|
518 | 528 | h[elem] = {} |
|
519 | 529 | h = h[elem] |
|
520 | 530 | if len(h) > 1: |
|
521 | 531 | break |
|
522 | 532 | h[None] = None # denotes files present |
|
523 | 533 | |
|
524 | 534 | if mf and not files and not dirs: |
|
525 | 535 | raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path) |
|
526 | 536 | |
|
527 | 537 | def filelist(**map): |
|
528 | 538 | for f in sorted(files): |
|
529 | 539 | full = files[f] |
|
530 | 540 | |
|
531 | 541 | fctx = ctx.filectx(full) |
|
532 | 542 | yield {"file": full, |
|
533 | 543 | "parity": next(parity), |
|
534 | 544 | "basename": f, |
|
535 | 545 | "date": fctx.date(), |
|
536 | 546 | "size": fctx.size(), |
|
537 | 547 | "permissions": mf.flags(full)} |
|
538 | 548 | |
|
539 | 549 | def dirlist(**map): |
|
540 | 550 | for d in sorted(dirs): |
|
541 | 551 | |
|
542 | 552 | emptydirs = [] |
|
543 | 553 | h = dirs[d] |
|
544 | 554 | while isinstance(h, dict) and len(h) == 1: |
|
545 | 555 | k, v = next(iter(h.items())) |
|
546 | 556 | if v: |
|
547 | 557 | emptydirs.append(k) |
|
548 | 558 | h = v |
|
549 | 559 | |
|
550 | 560 | path = "%s%s" % (abspath, d) |
|
551 | 561 | yield {"parity": next(parity), |
|
552 | 562 | "path": path, |
|
553 | 563 | "emptydirs": "/".join(emptydirs), |
|
554 | 564 | "basename": d} |
|
555 | 565 | |
|
556 | 566 | return tmpl("manifest", |
|
557 | 567 | symrev=symrev, |
|
558 | 568 | path=abspath, |
|
559 | 569 | up=webutil.up(abspath), |
|
560 | 570 | upparity=next(parity), |
|
561 | 571 | fentries=filelist, |
|
562 | 572 | dentries=dirlist, |
|
563 | 573 | archives=web.archivelist(hex(node)), |
|
564 | 574 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) |
|
565 | 575 | |
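
The manifest handler separates direct children of the requested path (files) from deeper entries, which it folds into a nested directory dict with ``None`` marking "files live here". A trimmed-down version of that bookkeeping, without the early-exit optimisation the real loop has::

    def splitmanifest(paths, prefix=''):
        files, dirs = {}, {}
        plen = len(prefix)
        for full in paths:
            if not full.startswith(prefix):
                continue
            remain = full[plen:]
            elements = remain.split('/')
            if len(elements) == 1:
                files[remain] = full         # direct child -> file entry
            else:
                h = dirs                     # walk/extend the directory tree
                for elem in elements[:-1]:
                    h = h.setdefault(elem, {})
                h[None] = None               # this directory holds files
        return files, dirs

    print(splitmanifest(['README', 'doc/intro.txt', 'doc/api/index.txt']))
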
|
566 | 576 | @webcommand('tags') |
|
567 | 577 | def tags(web, req, tmpl): |
|
568 | 578 | """ |
|
569 | 579 | /tags |
|
570 | 580 | ----- |
|
571 | 581 | |
|
572 | 582 | Show information about tags. |
|
573 | 583 | |
|
574 | 584 | No arguments are accepted. |
|
575 | 585 | |
|
576 | 586 | The ``tags`` template is rendered. |
|
577 | 587 | """ |
|
578 | 588 | i = list(reversed(web.repo.tagslist())) |
|
579 | 589 | parity = paritygen(web.stripecount) |
|
580 | 590 | |
|
581 | 591 | def entries(notip, latestonly, **map): |
|
582 | 592 | t = i |
|
583 | 593 | if notip: |
|
584 | 594 | t = [(k, n) for k, n in i if k != "tip"] |
|
585 | 595 | if latestonly: |
|
586 | 596 | t = t[:1] |
|
587 | 597 | for k, n in t: |
|
588 | 598 | yield {"parity": next(parity), |
|
589 | 599 | "tag": k, |
|
590 | 600 | "date": web.repo[n].date(), |
|
591 | 601 | "node": hex(n)} |
|
592 | 602 | |
|
593 | 603 | return tmpl("tags", |
|
594 | 604 | node=hex(web.repo.changelog.tip()), |
|
595 | 605 | entries=lambda **x: entries(False, False, **x), |
|
596 | 606 | entriesnotip=lambda **x: entries(True, False, **x), |
|
597 | 607 | latestentry=lambda **x: entries(True, True, **x)) |
|
598 | 608 | |
|
599 | 609 | @webcommand('bookmarks') |
|
600 | 610 | def bookmarks(web, req, tmpl): |
|
601 | 611 | """ |
|
602 | 612 | /bookmarks |
|
603 | 613 | ---------- |
|
604 | 614 | |
|
605 | 615 | Show information about bookmarks. |
|
606 | 616 | |
|
607 | 617 | No arguments are accepted. |
|
608 | 618 | |
|
609 | 619 | The ``bookmarks`` template is rendered. |
|
610 | 620 | """ |
|
611 | 621 | i = [b for b in web.repo._bookmarks.items() if b[1] in web.repo] |
|
612 | 622 | sortkey = lambda b: (web.repo[b[1]].rev(), b[0]) |
|
613 | 623 | i = sorted(i, key=sortkey, reverse=True) |
|
614 | 624 | parity = paritygen(web.stripecount) |
|
615 | 625 | |
|
616 | 626 | def entries(latestonly, **map): |
|
617 | 627 | t = i |
|
618 | 628 | if latestonly: |
|
619 | 629 | t = i[:1] |
|
620 | 630 | for k, n in t: |
|
621 | 631 | yield {"parity": next(parity), |
|
622 | 632 | "bookmark": k, |
|
623 | 633 | "date": web.repo[n].date(), |
|
624 | 634 | "node": hex(n)} |
|
625 | 635 | |
|
626 | 636 | if i: |
|
627 | 637 | latestrev = i[0][1] |
|
628 | 638 | else: |
|
629 | 639 | latestrev = -1 |
|
630 | 640 | |
|
631 | 641 | return tmpl("bookmarks", |
|
632 | 642 | node=hex(web.repo.changelog.tip()), |
|
633 | 643 | lastchange=[{"date": web.repo[latestrev].date()}], |
|
634 | 644 | entries=lambda **x: entries(latestonly=False, **x), |
|
635 | 645 | latestentry=lambda **x: entries(latestonly=True, **x)) |
|
636 | 646 | |
|
637 | 647 | @webcommand('branches') |
|
638 | 648 | def branches(web, req, tmpl): |
|
639 | 649 | """ |
|
640 | 650 | /branches |
|
641 | 651 | --------- |
|
642 | 652 | |
|
643 | 653 | Show information about branches. |
|
644 | 654 | |
|
645 | 655 | All known branches are contained in the output, even closed branches. |
|
646 | 656 | |
|
647 | 657 | No arguments are accepted. |
|
648 | 658 | |
|
649 | 659 | The ``branches`` template is rendered. |
|
650 | 660 | """ |
|
651 | 661 | entries = webutil.branchentries(web.repo, web.stripecount) |
|
652 | 662 | latestentry = webutil.branchentries(web.repo, web.stripecount, 1) |
|
653 | 663 | return tmpl('branches', node=hex(web.repo.changelog.tip()), |
|
654 | 664 | entries=entries, latestentry=latestentry) |
|
655 | 665 | |
|
656 | 666 | @webcommand('summary') |
|
657 | 667 | def summary(web, req, tmpl): |
|
658 | 668 | """ |
|
659 | 669 | /summary |
|
660 | 670 | -------- |
|
661 | 671 | |
|
662 | 672 | Show a summary of repository state. |
|
663 | 673 | |
|
664 | 674 | Information about the latest changesets, bookmarks, tags, and branches |
|
665 | 675 | is captured by this handler. |
|
666 | 676 | |
|
667 | 677 | The ``summary`` template is rendered. |
|
668 | 678 | """ |
|
669 | 679 | i = reversed(web.repo.tagslist()) |
|
670 | 680 | |
|
671 | 681 | def tagentries(**map): |
|
672 | 682 | parity = paritygen(web.stripecount) |
|
673 | 683 | count = 0 |
|
674 | 684 | for k, n in i: |
|
675 | 685 | if k == "tip": # skip tip |
|
676 | 686 | continue |
|
677 | 687 | |
|
678 | 688 | count += 1 |
|
679 | 689 | if count > 10: # limit to 10 tags |
|
680 | 690 | break |
|
681 | 691 | |
|
682 | 692 | yield tmpl("tagentry", |
|
683 | 693 | parity=next(parity), |
|
684 | 694 | tag=k, |
|
685 | 695 | node=hex(n), |
|
686 | 696 | date=web.repo[n].date()) |
|
687 | 697 | |
|
688 | 698 | def bookmarks(**map): |
|
689 | 699 | parity = paritygen(web.stripecount) |
|
690 | 700 | marks = [b for b in web.repo._bookmarks.items() if b[1] in web.repo] |
|
691 | 701 | sortkey = lambda b: (web.repo[b[1]].rev(), b[0]) |
|
692 | 702 | marks = sorted(marks, key=sortkey, reverse=True) |
|
693 | 703 | for k, n in marks[:10]: # limit to 10 bookmarks |
|
694 | 704 | yield {'parity': next(parity), |
|
695 | 705 | 'bookmark': k, |
|
696 | 706 | 'date': web.repo[n].date(), |
|
697 | 707 | 'node': hex(n)} |
|
698 | 708 | |
|
699 | 709 | def changelist(**map): |
|
700 | 710 | parity = paritygen(web.stripecount, offset=start - end) |
|
701 | 711 | l = [] # build a list in forward order for efficiency |
|
702 | 712 | revs = [] |
|
703 | 713 | if start < end: |
|
704 | 714 | revs = web.repo.changelog.revs(start, end - 1) |
|
705 | 715 | for i in revs: |
|
706 | 716 | ctx = web.repo[i] |
|
707 | 717 | |
|
708 | 718 | l.append(tmpl( |
|
709 | 719 | 'shortlogentry', |
|
710 | 720 | parity=next(parity), |
|
711 | 721 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx)))) |
|
712 | 722 | |
|
713 | 723 | for entry in reversed(l): |
|
714 | 724 | yield entry |
|
715 | 725 | |
|
716 | 726 | tip = web.repo['tip'] |
|
717 | 727 | count = len(web.repo) |
|
718 | 728 | start = max(0, count - web.maxchanges) |
|
719 | 729 | end = min(count, start + web.maxchanges) |
|
720 | 730 | |
|
721 | 731 | desc = web.config("web", "description") |
|
722 | 732 | if not desc: |
|
723 | 733 | desc = 'unknown' |
|
724 | 734 | return tmpl("summary", |
|
725 | 735 | desc=desc, |
|
726 | 736 | owner=get_contact(web.config) or "unknown", |
|
727 | 737 | lastchange=tip.date(), |
|
728 | 738 | tags=tagentries, |
|
729 | 739 | bookmarks=bookmarks, |
|
730 | 740 | branches=webutil.branchentries(web.repo, web.stripecount, 10), |
|
731 | 741 | shortlog=changelist, |
|
732 | 742 | node=tip.hex(), |
|
733 | 743 | symrev='tip', |
|
734 | 744 | archives=web.archivelist("tip"), |
|
735 | 745 | labels=web.configlist('web', 'labels')) |
|
736 | 746 | |
|
737 | 747 | @webcommand('filediff') |
|
738 | 748 | def filediff(web, req, tmpl): |
|
739 | 749 | """ |
|
740 | 750 | /diff/{revision}/{path} |
|
741 | 751 | ----------------------- |
|
742 | 752 | |
|
743 | 753 | Show how a file changed in a particular commit. |
|
744 | 754 | |
|
745 | 755 | The ``filediff`` template is rendered. |
|
746 | 756 | |
|
747 | 757 | This handler is registered under both the ``/diff`` and ``/filediff`` |
|
748 | 758 | paths. ``/diff`` is used in modern code. |
|
749 | 759 | """ |
|
750 | 760 | fctx, ctx = None, None |
|
751 | 761 | try: |
|
752 | 762 | fctx = webutil.filectx(web.repo, req) |
|
753 | 763 | except LookupError: |
|
754 | 764 | ctx = webutil.changectx(web.repo, req) |
|
755 | 765 | path = webutil.cleanpath(web.repo, req.req.qsparams['file']) |
|
756 | 766 | if path not in ctx.files(): |
|
757 | 767 | raise |
|
758 | 768 | |
|
759 | 769 | if fctx is not None: |
|
760 | 770 | path = fctx.path() |
|
761 | 771 | ctx = fctx.changectx() |
|
762 | 772 | basectx = ctx.p1() |
|
763 | 773 | |
|
764 | 774 | style = web.config('web', 'style') |
|
765 | 775 | if 'style' in req.req.qsparams: |
|
766 | 776 | style = req.req.qsparams['style'] |
|
767 | 777 | |
|
768 | 778 | diffs = webutil.diffs(web, tmpl, ctx, basectx, [path], style) |
|
769 | 779 | if fctx is not None: |
|
770 | 780 | rename = webutil.renamelink(fctx) |
|
771 | 781 | ctx = fctx |
|
772 | 782 | else: |
|
773 | 783 | rename = [] |
|
774 | 784 | ctx = ctx |
|
775 | 785 | return tmpl("filediff", |
|
776 | 786 | file=path, |
|
777 | 787 | symrev=webutil.symrevorshortnode(req, ctx), |
|
778 | 788 | rename=rename, |
|
779 | 789 | diff=diffs, |
|
780 | 790 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) |
|
781 | 791 | |
|
782 | 792 | diff = webcommand('diff')(filediff) |
|
783 | 793 | |
|
784 | 794 | @webcommand('comparison') |
|
785 | 795 | def comparison(web, req, tmpl): |
|
786 | 796 | """ |
|
787 | 797 | /comparison/{revision}/{path} |
|
788 | 798 | ----------------------------- |
|
789 | 799 | |
|
790 | 800 | Show a comparison between the old and new versions of a file from changes |
|
791 | 801 | made on a particular revision. |
|
792 | 802 | |
|
793 | 803 | This is similar to the ``diff`` handler. However, this form features |
|
794 | 804 | a split or side-by-side diff rather than a unified diff. |
|
795 | 805 | |
|
796 | 806 | The ``context`` query string argument can be used to control the lines of |
|
797 | 807 | context in the diff. |
|
798 | 808 | |
|
799 | 809 | The ``filecomparison`` template is rendered. |
|
800 | 810 | """ |
|
801 | 811 | ctx = webutil.changectx(web.repo, req) |
|
802 | 812 | if 'file' not in req.req.qsparams: |
|
803 | 813 | raise ErrorResponse(HTTP_NOT_FOUND, 'file not given') |
|
804 | 814 | path = webutil.cleanpath(web.repo, req.req.qsparams['file']) |
|
805 | 815 | |
|
806 | 816 | parsecontext = lambda v: v == 'full' and -1 or int(v) |
|
807 | 817 | if 'context' in req.req.qsparams: |
|
808 | 818 | context = parsecontext(req.req.qsparams['context']) |
|
809 | 819 | else: |
|
810 | 820 | context = parsecontext(web.config('web', 'comparisoncontext', '5')) |
|
811 | 821 | |
|
812 | 822 | def filelines(f): |
|
813 | 823 | if f.isbinary(): |
|
814 | 824 | mt = mimetypes.guess_type(f.path())[0] |
|
815 | 825 | if not mt: |
|
816 | 826 | mt = 'application/octet-stream' |
|
817 | 827 | return [_('(binary file %s, hash: %s)') % (mt, hex(f.filenode()))] |
|
818 | 828 | return f.data().splitlines() |
|
819 | 829 | |
|
820 | 830 | fctx = None |
|
821 | 831 | parent = ctx.p1() |
|
822 | 832 | leftrev = parent.rev() |
|
823 | 833 | leftnode = parent.node() |
|
824 | 834 | rightrev = ctx.rev() |
|
825 | 835 | rightnode = ctx.node() |
|
826 | 836 | if path in ctx: |
|
827 | 837 | fctx = ctx[path] |
|
828 | 838 | rightlines = filelines(fctx) |
|
829 | 839 | if path not in parent: |
|
830 | 840 | leftlines = () |
|
831 | 841 | else: |
|
832 | 842 | pfctx = parent[path] |
|
833 | 843 | leftlines = filelines(pfctx) |
|
834 | 844 | else: |
|
835 | 845 | rightlines = () |
|
836 | 846 | pfctx = ctx.parents()[0][path] |
|
837 | 847 | leftlines = filelines(pfctx) |
|
838 | 848 | |
|
839 | 849 | comparison = webutil.compare(tmpl, context, leftlines, rightlines) |
|
840 | 850 | if fctx is not None: |
|
841 | 851 | rename = webutil.renamelink(fctx) |
|
842 | 852 | ctx = fctx |
|
843 | 853 | else: |
|
844 | 854 | rename = [] |
|
845 | 855 | ctx = ctx |
|
846 | 856 | return tmpl('filecomparison', |
|
847 | 857 | file=path, |
|
848 | 858 | symrev=webutil.symrevorshortnode(req, ctx), |
|
849 | 859 | rename=rename, |
|
850 | 860 | leftrev=leftrev, |
|
851 | 861 | leftnode=hex(leftnode), |
|
852 | 862 | rightrev=rightrev, |
|
853 | 863 | rightnode=hex(rightnode), |
|
854 | 864 | comparison=comparison, |
|
855 | 865 | **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))) |
|
856 | 866 | |
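
The ``context`` parameter above uses the old ``and``/``or`` conditional idiom; written out, the rule is simply that ``full`` means unlimited context (-1) and anything else must parse as an integer::

    def parsecontext(v):
        # equivalent to: v == 'full' and -1 or int(v)
        if v == 'full':
            return -1
        return int(v)

    print(parsecontext('full'))   # -1
    print(parsecontext('5'))      # 5
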
|
857 | 867 | @webcommand('annotate') |
|
858 | 868 | def annotate(web, req, tmpl): |
|
859 | 869 | """ |
|
860 | 870 | /annotate/{revision}/{path} |
|
861 | 871 | --------------------------- |
|
862 | 872 | |
|
863 | 873 | Show changeset information for each line in a file. |
|
864 | 874 | |
|
865 | 875 | The ``ignorews``, ``ignorewsamount``, ``ignorewseol``, and |
|
866 | 876 | ``ignoreblanklines`` query string arguments have the same meaning as |
|
867 | 877 | their ``[annotate]`` config equivalents. It uses the hgrc boolean |
|
868 | 878 | parsing logic to interpret the value. e.g. ``0`` and ``false`` are |
|
869 | 879 | false and ``1`` and ``true`` are true. If not defined, the server |
|
870 | 880 | default settings are used. |
|
871 | 881 | |
|
872 | 882 | The ``fileannotate`` template is rendered. |
|
873 | 883 | """ |
|
874 | 884 | fctx = webutil.filectx(web.repo, req) |
|
875 | 885 | f = fctx.path() |
|
876 | 886 | parity = paritygen(web.stripecount) |
|
877 | 887 | ishead = fctx.filerev() in fctx.filelog().headrevs() |
|
878 | 888 | |
|
879 | 889 | # parents() is called once per line and several lines likely belong to |
|
880 | 890 | # same revision. So it is worth caching. |
|
881 | 891 | # TODO there are still redundant operations within basefilectx.parents() |
|
882 | 892 | # and from the fctx.annotate() call itself that could be cached. |
|
883 | 893 | parentscache = {} |
|
884 | 894 | def parents(f): |
|
885 | 895 | rev = f.rev() |
|
886 | 896 | if rev not in parentscache: |
|
887 | 897 | parentscache[rev] = [] |
|
888 | 898 | for p in f.parents(): |
|
889 | 899 | entry = { |
|
890 | 900 | 'node': p.hex(), |
|
891 | 901 | 'rev': p.rev(), |
|
892 | 902 | } |
|
893 | 903 | parentscache[rev].append(entry) |
|
894 | 904 | |
|
895 | 905 | for p in parentscache[rev]: |
|
896 | 906 | yield p |
|
897 | 907 | |
|
898 | 908 | def annotate(**map): |
|
899 | 909 | if fctx.isbinary(): |
|
900 | 910 | mt = (mimetypes.guess_type(fctx.path())[0] |
|
901 | 911 | or 'application/octet-stream') |
|
902 | 912 | lines = [((fctx.filectx(fctx.filerev()), 1), '(binary:%s)' % mt)] |
|
903 | 913 | else: |
|
904 | 914 | lines = webutil.annotate(req, fctx, web.repo.ui) |
|
905 | 915 | |
|
906 | 916 | previousrev = None |
|
907 | 917 | blockparitygen = paritygen(1) |
|
908 | 918 | for lineno, (aline, l) in enumerate(lines): |
|
909 | 919 | f = aline.fctx |
|
910 | 920 | rev = f.rev() |
|
911 | 921 | if rev != previousrev: |
|
912 | 922 | blockhead = True |
|
913 | 923 | blockparity = next(blockparitygen) |
|
914 | 924 | else: |
|
915 | 925 | blockhead = None |
|
916 | 926 | previousrev = rev |
|
917 | 927 | yield {"parity": next(parity), |
|
918 | 928 | "node": f.hex(), |
|
919 | 929 | "rev": rev, |
|
920 | 930 | "author": f.user(), |
|
921 | 931 | "parents": parents(f), |
|
922 | 932 | "desc": f.description(), |
|
923 | 933 | "extra": f.extra(), |
|
924 | 934 | "file": f.path(), |
|
925 | 935 | "blockhead": blockhead, |
|
926 | 936 | "blockparity": blockparity, |
|
927 | 937 | "targetline": aline.lineno, |
|
928 | 938 | "line": l, |
|
929 | 939 | "lineno": lineno + 1, |
|
930 | 940 | "lineid": "l%d" % (lineno + 1), |
|
931 | 941 | "linenumber": "% 6d" % (lineno + 1), |
|
932 | 942 | "revdate": f.date()} |
|
933 | 943 | |
|
934 | 944 | diffopts = webutil.difffeatureopts(req, web.repo.ui, 'annotate') |
|
935 | 945 | diffopts = {k: getattr(diffopts, k) for k in diffopts.defaults} |
|
936 | 946 | |
|
937 | 947 | return tmpl("fileannotate", |
|
938 | 948 | file=f, |
|
939 | 949 | annotate=annotate, |
|
940 | 950 | path=webutil.up(f), |
|
941 | 951 | symrev=webutil.symrevorshortnode(req, fctx), |
|
942 | 952 | rename=webutil.renamelink(fctx), |
|
943 | 953 | permissions=fctx.manifest().flags(f), |
|
944 | 954 | ishead=int(ishead), |
|
945 | 955 | diffopts=diffopts, |
|
946 | 956 | **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))) |
|
947 | 957 | |
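
``annotate`` caches the parent list per revision because many annotated lines come from the same revision. The memoisation pattern on its own, with a counting stub standing in for the relatively expensive ``f.parents()`` call::

    def makeparentscache(getparents):
        cache = {}
        def parents(rev):
            if rev not in cache:              # compute each revision only once
                cache[rev] = list(getparents(rev))
            return cache[rev]
        return parents

    calls = []
    def fakeparents(rev):
        calls.append(rev)                     # record how often we are hit
        return [rev - 1] if rev else []

    parents = makeparentscache(fakeparents)
    for linerev in [5, 5, 5, 3, 5]:           # lines from revisions 5 and 3
        parents(linerev)
    print(calls)                              # [5, 3]
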
|
948 | 958 | @webcommand('filelog') |
|
949 | 959 | def filelog(web, req, tmpl): |
|
950 | 960 | """ |
|
951 | 961 | /filelog/{revision}/{path} |
|
952 | 962 | -------------------------- |
|
953 | 963 | |
|
954 | 964 | Show information about the history of a file in the repository. |
|
955 | 965 | |
|
956 | 966 | The ``revcount`` query string argument can be defined to control the |
|
957 | 967 | maximum number of entries to show. |
|
958 | 968 | |
|
959 | 969 | The ``filelog`` template will be rendered. |
|
960 | 970 | """ |
|
961 | 971 | |
|
962 | 972 | try: |
|
963 | 973 | fctx = webutil.filectx(web.repo, req) |
|
964 | 974 | f = fctx.path() |
|
965 | 975 | fl = fctx.filelog() |
|
966 | 976 | except error.LookupError: |
|
967 | 977 | f = webutil.cleanpath(web.repo, req.req.qsparams['file']) |
|
968 | 978 | fl = web.repo.file(f) |
|
969 | 979 | numrevs = len(fl) |
|
970 | 980 | if not numrevs: # file doesn't exist at all |
|
971 | 981 | raise |
|
972 | 982 | rev = webutil.changectx(web.repo, req).rev() |
|
973 | 983 | first = fl.linkrev(0) |
|
974 | 984 | if rev < first: # current rev is from before file existed |
|
975 | 985 | raise |
|
976 | 986 | frev = numrevs - 1 |
|
977 | 987 | while fl.linkrev(frev) > rev: |
|
978 | 988 | frev -= 1 |
|
979 | 989 | fctx = web.repo.filectx(f, fl.linkrev(frev)) |
|
980 | 990 | |
|
981 | 991 | revcount = web.maxshortchanges |
|
982 | 992 | if 'revcount' in req.req.qsparams: |
|
983 | 993 | try: |
|
984 | 994 | revcount = int(req.req.qsparams.get('revcount', revcount)) |
|
985 | 995 | revcount = max(revcount, 1) |
|
986 | 996 | tmpl.defaults['sessionvars']['revcount'] = revcount |
|
987 | 997 | except ValueError: |
|
988 | 998 | pass |
|
989 | 999 | |
|
990 | 1000 | lrange = webutil.linerange(req) |
|
991 | 1001 | |
|
992 | 1002 | lessvars = copy.copy(tmpl.defaults['sessionvars']) |
|
993 | 1003 | lessvars['revcount'] = max(revcount // 2, 1) |
|
994 | 1004 | morevars = copy.copy(tmpl.defaults['sessionvars']) |
|
995 | 1005 | morevars['revcount'] = revcount * 2 |
|
996 | 1006 | |
|
997 | 1007 | patch = 'patch' in req.req.qsparams |
|
998 | 1008 | if patch: |
|
999 | 1009 | lessvars['patch'] = morevars['patch'] = req.req.qsparams['patch'] |
|
1000 | 1010 | descend = 'descend' in req.req.qsparams |
|
1001 | 1011 | if descend: |
|
1002 | 1012 | lessvars['descend'] = morevars['descend'] = req.req.qsparams['descend'] |
|
1003 | 1013 | |
|
1004 | 1014 | count = fctx.filerev() + 1 |
|
1005 | 1015 | start = max(0, count - revcount) # first rev on this page |
|
1006 | 1016 | end = min(count, start + revcount) # last rev on this page |
|
1007 | 1017 | parity = paritygen(web.stripecount, offset=start - end) |
|
1008 | 1018 | |
|
1009 | 1019 | repo = web.repo |
|
1010 | 1020 | revs = fctx.filelog().revs(start, end - 1) |
|
1011 | 1021 | entries = [] |
|
1012 | 1022 | |
|
1013 | 1023 | diffstyle = web.config('web', 'style') |
|
1014 | 1024 | if 'style' in req.req.qsparams: |
|
1015 | 1025 | diffstyle = req.req.qsparams['style'] |
|
1016 | 1026 | |
|
1017 | 1027 | def diff(fctx, linerange=None): |
|
1018 | 1028 | ctx = fctx.changectx() |
|
1019 | 1029 | basectx = ctx.p1() |
|
1020 | 1030 | path = fctx.path() |
|
1021 | 1031 | return webutil.diffs(web, tmpl, ctx, basectx, [path], diffstyle, |
|
1022 | 1032 | linerange=linerange, |
|
1023 | 1033 | lineidprefix='%s-' % ctx.hex()[:12]) |
|
1024 | 1034 | |
|
1025 | 1035 | linerange = None |
|
1026 | 1036 | if lrange is not None: |
|
1027 | 1037 | linerange = webutil.formatlinerange(*lrange) |
|
1028 | 1038 | # deactivate numeric nav links when linerange is specified as this |
|
1029 | 1039 | # would required a dedicated "revnav" class |
|
1030 | 1040 | nav = None |
|
1031 | 1041 | if descend: |
|
1032 | 1042 | it = dagop.blockdescendants(fctx, *lrange) |
|
1033 | 1043 | else: |
|
1034 | 1044 | it = dagop.blockancestors(fctx, *lrange) |
|
1035 | 1045 | for i, (c, lr) in enumerate(it, 1): |
|
1036 | 1046 | diffs = None |
|
1037 | 1047 | if patch: |
|
1038 | 1048 | diffs = diff(c, linerange=lr) |
|
1039 | 1049 | # follow renames accross filtered (not in range) revisions |
|
1040 | 1050 | path = c.path() |
|
1041 | 1051 | entries.append(dict( |
|
1042 | 1052 | parity=next(parity), |
|
1043 | 1053 | filerev=c.rev(), |
|
1044 | 1054 | file=path, |
|
1045 | 1055 | diff=diffs, |
|
1046 | 1056 | linerange=webutil.formatlinerange(*lr), |
|
1047 | 1057 | **pycompat.strkwargs(webutil.commonentry(repo, c)))) |
|
1048 | 1058 | if i == revcount: |
|
1049 | 1059 | break |
|
1050 | 1060 | lessvars['linerange'] = webutil.formatlinerange(*lrange) |
|
1051 | 1061 | morevars['linerange'] = lessvars['linerange'] |
|
1052 | 1062 | else: |
|
1053 | 1063 | for i in revs: |
|
1054 | 1064 | iterfctx = fctx.filectx(i) |
|
1055 | 1065 | diffs = None |
|
1056 | 1066 | if patch: |
|
1057 | 1067 | diffs = diff(iterfctx) |
|
1058 | 1068 | entries.append(dict( |
|
1059 | 1069 | parity=next(parity), |
|
1060 | 1070 | filerev=i, |
|
1061 | 1071 | file=f, |
|
1062 | 1072 | diff=diffs, |
|
1063 | 1073 | rename=webutil.renamelink(iterfctx), |
|
1064 | 1074 | **pycompat.strkwargs(webutil.commonentry(repo, iterfctx)))) |
|
1065 | 1075 | entries.reverse() |
|
1066 | 1076 | revnav = webutil.filerevnav(web.repo, fctx.path()) |
|
1067 | 1077 | nav = revnav.gen(end - 1, revcount, count) |
|
1068 | 1078 | |
|
1069 | 1079 | latestentry = entries[:1] |
|
1070 | 1080 | |
|
1071 | return tmpl("filelog", | |
|
1072 | file=f, | |
|
|
|
1073 | nav=nav, | |
|
1074 | symrev=webutil.symrevorshortnode(req, fctx), | |
|
1075 | entries=entries, | |
|
1076 | descend=descend, | |
|
1077 | patch=patch, | |
|
1078 | latestentry=latestentry, | |
|
1079 | linerange=linerange, | |
|
1080 | revcount=revcount, | |
|
1081 | morevars=morevars, | |
|
1082 | lessvars=lessvars, | |
|
|
|
1083 | **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))) | |
|
1081 | web.res.setbodygen(tmpl( | |
|
1082 | 'filelog', | |
|
1083 | file=f, | |
|
1084 | nav=nav, | |
|
1085 | symrev=webutil.symrevorshortnode(req, fctx), | |
|
1086 | entries=entries, | |
|
1087 | descend=descend, | |
|
1088 | patch=patch, | |
|
1089 | latestentry=latestentry, | |
|
1090 | linerange=linerange, | |
|
1091 | revcount=revcount, | |
|
1092 | morevars=morevars, | |
|
1093 | lessvars=lessvars, | |
|
1094 | **pycompat.strkwargs(webutil.commonentry(web.repo, fctx)))) | |
|
1095 | ||
|
1096 | return web.res | |
|
1084 | 1097 | |
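
When the requested changeset does not touch the file, ``filelog`` walks the filelog's linkrevs backwards to find the newest file revision introduced at or before that changeset. The same walk over a plain list, where ``linkrevs[i]`` plays the role of ``fl.linkrev(i)``::

    def latestfilerev(linkrevs, rev):
        if not linkrevs or rev < linkrevs[0]:
            raise LookupError('file did not exist at that revision')
        frev = len(linkrevs) - 1
        while linkrevs[frev] > rev:          # scan back to rev or earlier
            frev -= 1
        return frev

    # file revisions 0..3 were introduced by changelog revisions 2, 5, 9, 14
    print(latestfilerev([2, 5, 9, 14], 10))  # 2 (introduced at changelog rev 9)
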
|
1085 | 1098 | @webcommand('archive') |
|
1086 | 1099 | def archive(web, req, tmpl): |
|
1087 | 1100 | """ |
|
1088 | 1101 | /archive/{revision}.{format}[/{path}] |
|
1089 | 1102 | ------------------------------------- |
|
1090 | 1103 | |
|
1091 | 1104 | Obtain an archive of repository content. |
|
1092 | 1105 | |
|
1093 | 1106 | The content and type of the archive is defined by a URL path parameter. |
|
1094 | 1107 | ``format`` is the file extension of the archive type to be generated. e.g. |
|
1095 | 1108 | ``zip`` or ``tar.bz2``. Not all archive types may be allowed by your |
|
1096 | 1109 | server configuration. |
|
1097 | 1110 | |
|
1098 | 1111 | The optional ``path`` URL parameter controls content to include in the |
|
1099 | 1112 | archive. If omitted, every file in the specified revision is present in the |
|
1100 | 1113 | archive. If included, only the specified file or contents of the specified |
|
1101 | 1114 | directory will be included in the archive. |
|
1102 | 1115 | |
|
1103 | 1116 | No template is used for this handler. Raw, binary content is generated. |
|
1104 | 1117 | """ |
|
1105 | 1118 | |
|
1106 | 1119 | type_ = req.req.qsparams.get('type') |
|
1107 | 1120 | allowed = web.configlist("web", "allow_archive") |
|
1108 | 1121 | key = req.req.qsparams['node'] |
|
1109 | 1122 | |
|
1110 | 1123 | if type_ not in web.archivespecs: |
|
1111 | 1124 | msg = 'Unsupported archive type: %s' % type_ |
|
1112 | 1125 | raise ErrorResponse(HTTP_NOT_FOUND, msg) |
|
1113 | 1126 | |
|
1114 | 1127 | if not ((type_ in allowed or |
|
1115 | 1128 | web.configbool("web", "allow" + type_))): |
|
1116 | 1129 | msg = 'Archive type not allowed: %s' % type_ |
|
1117 | 1130 | raise ErrorResponse(HTTP_FORBIDDEN, msg) |
|
1118 | 1131 | |
|
1119 | 1132 | reponame = re.sub(br"\W+", "-", os.path.basename(web.reponame)) |
|
1120 | 1133 | cnode = web.repo.lookup(key) |
|
1121 | 1134 | arch_version = key |
|
1122 | 1135 | if cnode == key or key == 'tip': |
|
1123 | 1136 | arch_version = short(cnode) |
|
1124 | 1137 | name = "%s-%s" % (reponame, arch_version) |
|
1125 | 1138 | |
|
1126 | 1139 | ctx = webutil.changectx(web.repo, req) |
|
1127 | 1140 | pats = [] |
|
1128 | 1141 | match = scmutil.match(ctx, []) |
|
1129 | 1142 | file = req.req.qsparams.get('file') |
|
1130 | 1143 | if file: |
|
1131 | 1144 | pats = ['path:' + file] |
|
1132 | 1145 | match = scmutil.match(ctx, pats, default='path') |
|
1133 | 1146 | if pats: |
|
1134 | 1147 | files = [f for f in ctx.manifest().keys() if match(f)] |
|
1135 | 1148 | if not files: |
|
1136 | 1149 | raise ErrorResponse(HTTP_NOT_FOUND, |
|
1137 | 1150 | 'file(s) not found: %s' % file) |
|
1138 | 1151 | |
|
1139 | 1152 | mimetype, artype, extension, encoding = web.archivespecs[type_] |
|
1140 | 1153 | headers = [ |
|
1141 | 1154 | ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension)) |
|
1142 | 1155 | ] |
|
1143 | 1156 | if encoding: |
|
1144 | 1157 | headers.append(('Content-Encoding', encoding)) |
|
1145 | 1158 | req.headers.extend(headers) |
|
1146 | 1159 | req.respond(HTTP_OK, mimetype) |
|
1147 | 1160 | |
|
1148 | 1161 | archival.archive(web.repo, req, cnode, artype, prefix=name, |
|
1149 | 1162 | matchfn=match, |
|
1150 | 1163 | subrepos=web.configbool("web", "archivesubrepos")) |
|
1151 | 1164 | return [] |
|
1152 | 1165 | |
|
1153 | 1166 | |
|
1154 | 1167 | @webcommand('static') |
|
1155 | 1168 | def static(web, req, tmpl): |
|
1156 | 1169 | fname = req.req.qsparams['file'] |
|
1157 | 1170 | # a repo owner may set web.static in .hg/hgrc to get any file |
|
1158 | 1171 | # readable by the user running the CGI script |
|
1159 | 1172 | static = web.config("web", "static", None, untrusted=False) |
|
1160 | 1173 | if not static: |
|
1161 | 1174 | tp = web.templatepath or templater.templatepaths() |
|
1162 | 1175 | if isinstance(tp, str): |
|
1163 | 1176 | tp = [tp] |
|
1164 | 1177 | static = [os.path.join(p, 'static') for p in tp] |
|
1165 | 1178 | staticfile(static, fname, req) |
|
1166 | 1179 | return [] |
|
1167 | 1180 | |
|
1168 | 1181 | @webcommand('graph') |
|
1169 | 1182 | def graph(web, req, tmpl): |
|
1170 | 1183 | """ |
|
1171 | 1184 | /graph[/{revision}] |
|
1172 | 1185 | ------------------- |
|
1173 | 1186 | |
|
1174 | 1187 | Show information about the graphical topology of the repository. |
|
1175 | 1188 | |
|
1176 | 1189 | Information rendered by this handler can be used to create visual |
|
1177 | 1190 | representations of repository topology. |
|
1178 | 1191 | |
|
1179 | 1192 | The ``revision`` URL parameter controls the starting changeset. If it's |
|
1180 | 1193 | absent, the default is ``tip``. |
|
1181 | 1194 | |
|
1182 | 1195 | The ``revcount`` query string argument can define the number of changesets |
|
1183 | 1196 | to show information for. |
|
1184 | 1197 | |
|
1185 | 1198 | The ``graphtop`` query string argument can specify the starting changeset |
|
1186 | 1199 | for producing ``jsdata`` variable that is used for rendering graph in |
|
1187 | 1200 | JavaScript. By default it has the same value as ``revision``. |
|
1188 | 1201 | |
|
1189 | 1202 | This handler will render the ``graph`` template. |
|
1190 | 1203 | """ |
|
1191 | 1204 | |
|
1192 | 1205 | if 'node' in req.req.qsparams: |
|
1193 | 1206 | ctx = webutil.changectx(web.repo, req) |
|
1194 | 1207 | symrev = webutil.symrevorshortnode(req, ctx) |
|
1195 | 1208 | else: |
|
1196 | 1209 | ctx = web.repo['tip'] |
|
1197 | 1210 | symrev = 'tip' |
|
1198 | 1211 | rev = ctx.rev() |
|
1199 | 1212 | |
|
1200 | 1213 | bg_height = 39 |
|
1201 | 1214 | revcount = web.maxshortchanges |
|
1202 | 1215 | if 'revcount' in req.req.qsparams: |
|
1203 | 1216 | try: |
|
1204 | 1217 | revcount = int(req.req.qsparams.get('revcount', revcount)) |
|
1205 | 1218 | revcount = max(revcount, 1) |
|
1206 | 1219 | tmpl.defaults['sessionvars']['revcount'] = revcount |
|
1207 | 1220 | except ValueError: |
|
1208 | 1221 | pass |
|
1209 | 1222 | |
|
1210 | 1223 | lessvars = copy.copy(tmpl.defaults['sessionvars']) |
|
1211 | 1224 | lessvars['revcount'] = max(revcount // 2, 1) |
|
1212 | 1225 | morevars = copy.copy(tmpl.defaults['sessionvars']) |
|
1213 | 1226 | morevars['revcount'] = revcount * 2 |
|
1214 | 1227 | |
|
1215 | 1228 | graphtop = req.req.qsparams.get('graphtop', ctx.hex()) |
|
1216 | 1229 | graphvars = copy.copy(tmpl.defaults['sessionvars']) |
|
1217 | 1230 | graphvars['graphtop'] = graphtop |
|
1218 | 1231 | |
|
1219 | 1232 | count = len(web.repo) |
|
1220 | 1233 | pos = rev |
|
1221 | 1234 | |
|
1222 | 1235 | uprev = min(max(0, count - 1), rev + revcount) |
|
1223 | 1236 | downrev = max(0, rev - revcount) |
|
1224 | 1237 | changenav = webutil.revnav(web.repo).gen(pos, revcount, count) |
|
1225 | 1238 | |
|
1226 | 1239 | tree = [] |
|
1227 | 1240 | nextentry = [] |
|
1228 | 1241 | lastrev = 0 |
|
1229 | 1242 | if pos != -1: |
|
1230 | 1243 | allrevs = web.repo.changelog.revs(pos, 0) |
|
1231 | 1244 | revs = [] |
|
1232 | 1245 | for i in allrevs: |
|
1233 | 1246 | revs.append(i) |
|
1234 | 1247 | if len(revs) >= revcount + 1: |
|
1235 | 1248 | break |
|
1236 | 1249 | |
|
1237 | 1250 | if len(revs) > revcount: |
|
1238 | 1251 | nextentry = [webutil.commonentry(web.repo, web.repo[revs[-1]])] |
|
1239 | 1252 | revs = revs[:-1] |
|
1240 | 1253 | |
|
1241 | 1254 | lastrev = revs[-1] |
|
1242 | 1255 | |
|
1243 | 1256 | # We have to feed a baseset to dagwalker as it is expecting a smartset |
|
1244 | 1257 | # object. This does not have a big impact on hgweb performance itself |
|
1245 | 1258 | # since hgweb graphing code is not itself lazy yet. |
|
1246 | 1259 | dag = graphmod.dagwalker(web.repo, smartset.baseset(revs)) |
|
1247 | 1260 | # As we said one line above... not lazy. |
|
1248 | 1261 | tree = list(item for item in graphmod.colored(dag, web.repo) |
|
1249 | 1262 | if item[1] == graphmod.CHANGESET) |
|
1250 | 1263 | |
|
1251 | 1264 | def nodecurrent(ctx): |
|
1252 | 1265 | wpnodes = web.repo.dirstate.parents() |
|
1253 | 1266 | if wpnodes[1] == nullid: |
|
1254 | 1267 | wpnodes = wpnodes[:1] |
|
1255 | 1268 | if ctx.node() in wpnodes: |
|
1256 | 1269 | return '@' |
|
1257 | 1270 | return '' |
|
1258 | 1271 | |
|
1259 | 1272 | def nodesymbol(ctx): |
|
1260 | 1273 | if ctx.obsolete(): |
|
1261 | 1274 | return 'x' |
|
1262 | 1275 | elif ctx.isunstable(): |
|
1263 | 1276 | return '*' |
|
1264 | 1277 | elif ctx.closesbranch(): |
|
1265 | 1278 | return '_' |
|
1266 | 1279 | else: |
|
1267 | 1280 | return 'o' |
|
1268 | 1281 | |
|
1269 | 1282 | def fulltree(): |
|
1270 | 1283 | pos = web.repo[graphtop].rev() |
|
1271 | 1284 | tree = [] |
|
1272 | 1285 | if pos != -1: |
|
1273 | 1286 | revs = web.repo.changelog.revs(pos, lastrev) |
|
1274 | 1287 | dag = graphmod.dagwalker(web.repo, smartset.baseset(revs)) |
|
1275 | 1288 | tree = list(item for item in graphmod.colored(dag, web.repo) |
|
1276 | 1289 | if item[1] == graphmod.CHANGESET) |
|
1277 | 1290 | return tree |
|
1278 | 1291 | |
|
1279 | 1292 | def jsdata(): |
|
1280 | 1293 | return [{'node': pycompat.bytestr(ctx), |
|
1281 | 1294 | 'graphnode': nodecurrent(ctx) + nodesymbol(ctx), |
|
1282 | 1295 | 'vertex': vtx, |
|
1283 | 1296 | 'edges': edges} |
|
1284 | 1297 | for (id, type, ctx, vtx, edges) in fulltree()] |
|
1285 | 1298 | |
|
1286 | 1299 | def nodes(): |
|
1287 | 1300 | parity = paritygen(web.stripecount) |
|
1288 | 1301 | for row, (id, type, ctx, vtx, edges) in enumerate(tree): |
|
1289 | 1302 | entry = webutil.commonentry(web.repo, ctx) |
|
1290 | 1303 | edgedata = [{'col': edge[0], |
|
1291 | 1304 | 'nextcol': edge[1], |
|
1292 | 1305 | 'color': (edge[2] - 1) % 6 + 1, |
|
1293 | 1306 | 'width': edge[3], |
|
1294 | 1307 | 'bcolor': edge[4]} |
|
1295 | 1308 | for edge in edges] |
|
1296 | 1309 | |
|
1297 | 1310 | entry.update({'col': vtx[0], |
|
1298 | 1311 | 'color': (vtx[1] - 1) % 6 + 1, |
|
1299 | 1312 | 'parity': next(parity), |
|
1300 | 1313 | 'edges': edgedata, |
|
1301 | 1314 | 'row': row, |
|
1302 | 1315 | 'nextrow': row + 1}) |
|
1303 | 1316 | |
|
1304 | 1317 | yield entry |
|
1305 | 1318 | |
|
1306 | 1319 | rows = len(tree) |
|
1307 | 1320 | |
|
1308 | 1321 | return tmpl('graph', rev=rev, symrev=symrev, revcount=revcount, |
|
1309 | 1322 | uprev=uprev, |
|
1310 | 1323 | lessvars=lessvars, morevars=morevars, downrev=downrev, |
|
1311 | 1324 | graphvars=graphvars, |
|
1312 | 1325 | rows=rows, |
|
1313 | 1326 | bg_height=bg_height, |
|
1314 | 1327 | changesets=count, |
|
1315 | 1328 | nextentry=nextentry, |
|
1316 | 1329 | jsdata=lambda **x: jsdata(), |
|
1317 | 1330 | nodes=lambda **x: nodes(), |
|
1318 | 1331 | node=ctx.hex(), changenav=changenav) |
|
1319 | 1332 | |
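
``jsdata`` above tags every node with a glyph: ``x`` for obsolete, ``*`` for unstable, ``_`` for a branch-closing changeset, ``o`` otherwise, prefixed with ``@`` for a working-directory parent. The mapping on its own, with booleans standing in for the context methods::

    def nodesymbol(obsolete, unstable, closesbranch):
        if obsolete:
            return 'x'
        if unstable:
            return '*'
        if closesbranch:
            return '_'
        return 'o'

    def graphnode(iswdirparent, **flags):
        # '@' marks a working-directory parent, as nodecurrent() does above
        return ('@' if iswdirparent else '') + nodesymbol(**flags)

    print(graphnode(True, obsolete=False, unstable=False, closesbranch=False))   # @o
    print(graphnode(False, obsolete=True, unstable=False, closesbranch=False))   # x
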
|
1320 | 1333 | def _getdoc(e): |
|
1321 | 1334 | doc = e[0].__doc__ |
|
1322 | 1335 | if doc: |
|
1323 | 1336 | doc = _(doc).partition('\n')[0] |
|
1324 | 1337 | else: |
|
1325 | 1338 | doc = _('(no help text available)') |
|
1326 | 1339 | return doc |
|
1327 | 1340 | |
|
1328 | 1341 | @webcommand('help') |
|
1329 | 1342 | def help(web, req, tmpl): |
|
1330 | 1343 | """ |
|
1331 | 1344 | /help[/{topic}] |
|
1332 | 1345 | --------------- |
|
1333 | 1346 | |
|
1334 | 1347 | Render help documentation. |
|
1335 | 1348 | |
|
1336 | 1349 | This web command is roughly equivalent to :hg:`help`. If a ``topic`` |
|
1337 | 1350 | is defined, that help topic will be rendered. If not, an index of |
|
1338 | 1351 | available help topics will be rendered. |
|
1339 | 1352 | |
|
1340 | 1353 | The ``help`` template will be rendered when requesting help for a topic. |
|
1341 | 1354 | ``helptopics`` will be rendered for the index of help topics. |
|
1342 | 1355 | """ |
|
1343 | 1356 | from .. import commands, help as helpmod # avoid cycle |
|
1344 | 1357 | |
|
1345 | 1358 | topicname = req.req.qsparams.get('node') |
|
1346 | 1359 | if not topicname: |
|
1347 | 1360 | def topics(**map): |
|
1348 | 1361 | for entries, summary, _doc in helpmod.helptable: |
|
1349 | 1362 | yield {'topic': entries[0], 'summary': summary} |
|
1350 | 1363 | |
|
1351 | 1364 | early, other = [], [] |
|
1352 | 1365 | primary = lambda s: s.partition('|')[0] |
|
1353 | 1366 | for c, e in commands.table.iteritems(): |
|
1354 | 1367 | doc = _getdoc(e) |
|
1355 | 1368 | if 'DEPRECATED' in doc or c.startswith('debug'): |
|
1356 | 1369 | continue |
|
1357 | 1370 | cmd = primary(c) |
|
1358 | 1371 | if cmd.startswith('^'): |
|
1359 | 1372 | early.append((cmd[1:], doc)) |
|
1360 | 1373 | else: |
|
1361 | 1374 | other.append((cmd, doc)) |
|
1362 | 1375 | |
|
1363 | 1376 | early.sort() |
|
1364 | 1377 | other.sort() |
|
1365 | 1378 | |
|
1366 | 1379 | def earlycommands(**map): |
|
1367 | 1380 | for c, doc in early: |
|
1368 | 1381 | yield {'topic': c, 'summary': doc} |
|
1369 | 1382 | |
|
1370 | 1383 | def othercommands(**map): |
|
1371 | 1384 | for c, doc in other: |
|
1372 | 1385 | yield {'topic': c, 'summary': doc} |
|
1373 | 1386 | |
|
1374 | 1387 | return tmpl('helptopics', topics=topics, earlycommands=earlycommands, |
|
1375 | 1388 | othercommands=othercommands, title='Index') |
|
1376 | 1389 | |
|
1377 | 1390 | # Render an index of sub-topics. |
|
1378 | 1391 | if topicname in helpmod.subtopics: |
|
1379 | 1392 | topics = [] |
|
1380 | 1393 | for entries, summary, _doc in helpmod.subtopics[topicname]: |
|
1381 | 1394 | topics.append({ |
|
1382 | 1395 | 'topic': '%s.%s' % (topicname, entries[0]), |
|
1383 | 1396 | 'basename': entries[0], |
|
1384 | 1397 | 'summary': summary, |
|
1385 | 1398 | }) |
|
1386 | 1399 | |
|
1387 | 1400 | return tmpl('helptopics', topics=topics, title=topicname, |
|
1388 | 1401 | subindex=True) |
|
1389 | 1402 | |
|
1390 | 1403 | u = webutil.wsgiui.load() |
|
1391 | 1404 | u.verbose = True |
|
1392 | 1405 | |
|
1393 | 1406 | # Render a page from a sub-topic. |
|
1394 | 1407 | if '.' in topicname: |
|
1395 | 1408 | # TODO implement support for rendering sections, like |
|
1396 | 1409 | # `hg help` works. |
|
1397 | 1410 | topic, subtopic = topicname.split('.', 1) |
|
1398 | 1411 | if topic not in helpmod.subtopics: |
|
1399 | 1412 | raise ErrorResponse(HTTP_NOT_FOUND) |
|
1400 | 1413 | else: |
|
1401 | 1414 | topic = topicname |
|
1402 | 1415 | subtopic = None |
|
1403 | 1416 | |
|
1404 | 1417 | try: |
|
1405 | 1418 | doc = helpmod.help_(u, commands, topic, subtopic=subtopic) |
|
1406 | 1419 | except error.Abort: |
|
1407 | 1420 | raise ErrorResponse(HTTP_NOT_FOUND) |
|
1408 | 1421 | return tmpl('help', topic=topicname, doc=doc) |
|
1409 | 1422 | |
|
1410 | 1423 | # tell hggettext to extract docstrings from these functions: |
|
1411 | 1424 | i18nfunctions = commands.values() |