imports: always use CONFIG as kallithea.CONFIG
Mads Kiilerich
r8433:072c0352 default
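The files below are all converted the same way: the module-level (and function-local) 'from kallithea import CONFIG' imports are replaced by a plain 'import kallithea', and every lookup is spelled kallithea.CONFIG. A minimal sketch of the pattern, using a setting name taken from the feed controller below (illustrative only, not part of the changeset):

    # Before: CONFIG is bound into the importing module once, at import time.
    from kallithea import CONFIG
    diff_limit = CONFIG.get('rss_cut_off_limit', 32 * 1024)

    # After: only the package is imported; CONFIG is resolved as an attribute
    # on every use, so the code always reads the current kallithea.CONFIG.
    import kallithea
    diff_limit = kallithea.CONFIG.get('rss_cut_off_limit', 32 * 1024)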
@@ -1,134 +1,134 @@
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 kallithea.controllers.feed
16 16 ~~~~~~~~~~~~~~~~~~~~~~~~~~
17 17
18 18 Feed controller for Kallithea
19 19
20 20 This file was forked by the Kallithea project in July 2014.
21 21 Original author and date, and relevant copyright and licensing information is below:
22 22 :created_on: Apr 23, 2010
23 23 :author: marcink
24 24 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 25 :license: GPLv3, see LICENSE.md for more details.
26 26 """
27 27
28 28
29 29 import logging
30 30
31 31 from beaker.cache import cache_region
32 32 from tg import response
33 33 from tg import tmpl_context as c
34 34 from tg.i18n import ugettext as _
35 35
36 from kallithea import CONFIG
36 import kallithea
37 37 from kallithea.lib import feeds
38 38 from kallithea.lib import helpers as h
39 39 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
40 40 from kallithea.lib.base import BaseRepoController
41 41 from kallithea.lib.diffs import DiffProcessor
42 42 from kallithea.lib.utils2 import asbool, safe_int, safe_str
43 43
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47
48 48 class FeedController(BaseRepoController):
49 49
50 50 @LoginRequired(allow_default_user=True)
51 51 @HasRepoPermissionLevelDecorator('read')
52 52 def _before(self, *args, **kwargs):
53 53 super(FeedController, self)._before(*args, **kwargs)
54 54
55 55 def _get_title(self, cs):
56 56 return h.shorter(cs.message, 160)
57 57
58 58 def __get_desc(self, cs):
59 59 desc_msg = [(_('%s committed on %s')
60 60 % (h.person(cs.author), h.fmt_date(cs.date))) + '<br/>']
61 61 # branches, tags, bookmarks
62 62 for branch in cs.branches:
63 63 desc_msg.append('branch: %s<br/>' % branch)
64 64 for book in cs.bookmarks:
65 65 desc_msg.append('bookmark: %s<br/>' % book)
66 66 for tag in cs.tags:
67 67 desc_msg.append('tag: %s<br/>' % tag)
68 68
69 69 changes = []
70 diff_limit = safe_int(CONFIG.get('rss_cut_off_limit', 32 * 1024))
70 diff_limit = safe_int(kallithea.CONFIG.get('rss_cut_off_limit', 32 * 1024))
71 71 raw_diff = cs.diff()
72 72 diff_processor = DiffProcessor(raw_diff,
73 73 diff_limit=diff_limit,
74 74 inline_diff=False)
75 75
76 76 for st in diff_processor.parsed:
77 77 st.update({'added': st['stats']['added'],
78 78 'removed': st['stats']['deleted']})
79 79 changes.append('\n %(operation)s %(filename)s '
80 80 '(%(added)s lines added, %(removed)s lines removed)'
81 81 % st)
82 82 if diff_processor.limited_diff:
83 83 changes = changes + ['\n ' +
84 84 _('Changeset was too big and was cut off...')]
85 85
86 86 # rev link
87 87 _url = h.canonical_url('changeset_home', repo_name=c.db_repo.repo_name,
88 88 revision=cs.raw_id)
89 89 desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))
90 90
91 91 desc_msg.append('<pre>')
92 92 desc_msg.append(h.urlify_text(cs.message))
93 93 desc_msg.append('\n')
94 94 desc_msg.extend(changes)
95 if asbool(CONFIG.get('rss_include_diff', False)):
95 if asbool(kallithea.CONFIG.get('rss_include_diff', False)):
96 96 desc_msg.append('\n\n')
97 97 desc_msg.append(safe_str(raw_diff))
98 98 desc_msg.append('</pre>')
99 99 return desc_msg
100 100
101 101 def _feed(self, repo_name, feeder):
102 102 """Produce a simple feed"""
103 103
104 104 @cache_region('long_term_file', '_get_feed_from_cache')
105 105 def _get_feed_from_cache(*_cache_keys): # parameters are not really used - only as caching key
106 106 header = dict(
107 107 title=_('%s %s feed') % (c.site_name, repo_name),
108 108 link=h.canonical_url('summary_home', repo_name=repo_name),
109 109 description=_('Changes on %s repository') % repo_name,
110 110 )
111 111
112 rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
112 rss_items_per_page = safe_int(kallithea.CONFIG.get('rss_items_per_page', 20))
113 113 entries=[]
114 114 for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
115 115 entries.append(dict(
116 116 title=self._get_title(cs),
117 117 link=h.canonical_url('changeset_home', repo_name=repo_name, revision=cs.raw_id),
118 118 author_email=cs.author_email,
119 119 author_name=cs.author_name,
120 120 description=''.join(self.__get_desc(cs)),
121 121 pubdate=cs.date,
122 122 ))
123 123 return feeder.render(header, entries)
124 124
125 125 response.content_type = feeder.content_type
126 126 return _get_feed_from_cache(repo_name, feeder.__name__)
127 127
128 128 def atom(self, repo_name):
129 129 """Produce a simple atom-1.0 feed"""
130 130 return self._feed(repo_name, feeds.AtomFeed)
131 131
132 132 def rss(self, repo_name):
133 133 """Produce a simple rss2 feed"""
134 134 return self._feed(repo_name, feeds.RssFeed)
@@ -1,746 +1,746 @@
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 kallithea.controllers.files
16 16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
17 17
18 18 Files controller for Kallithea
19 19
20 20 This file was forked by the Kallithea project in July 2014.
21 21 Original author and date, and relevant copyright and licensing information is below:
22 22 :created_on: Apr 21, 2010
23 23 :author: marcink
24 24 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 25 :license: GPLv3, see LICENSE.md for more details.
26 26 """
27 27
28 28 import logging
29 29 import os
30 30 import posixpath
31 31 import shutil
32 32 import tempfile
33 33 import traceback
34 34 from collections import OrderedDict
35 35
36 36 from tg import request, response
37 37 from tg import tmpl_context as c
38 38 from tg.i18n import ugettext as _
39 39 from webob.exc import HTTPFound, HTTPNotFound
40 40
41 import kallithea
41 42 from kallithea.config.routing import url
42 43 from kallithea.lib import diffs
43 44 from kallithea.lib import helpers as h
44 45 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
45 46 from kallithea.lib.base import BaseRepoController, jsonify, render
46 47 from kallithea.lib.exceptions import NonRelativePathError
47 48 from kallithea.lib.utils import action_logger
48 49 from kallithea.lib.utils2 import asbool, convert_line_endings, detect_mode, safe_str
49 50 from kallithea.lib.vcs.backends.base import EmptyChangeset
50 51 from kallithea.lib.vcs.conf import settings
51 52 from kallithea.lib.vcs.exceptions import (ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, ImproperArchiveTypeError, NodeAlreadyExistsError,
52 53 NodeDoesNotExistError, NodeError, RepositoryError, VCSError)
53 54 from kallithea.lib.vcs.nodes import FileNode
54 55 from kallithea.model import db
55 56 from kallithea.model.repo import RepoModel
56 57 from kallithea.model.scm import ScmModel
57 58
58 59
59 60 log = logging.getLogger(__name__)
60 61
61 62
62 63 class FilesController(BaseRepoController):
63 64
64 65 def _before(self, *args, **kwargs):
65 66 super(FilesController, self)._before(*args, **kwargs)
66 67
67 68 def __get_cs(self, rev, silent_empty=False):
68 69 """
69 70 Safe way to get a changeset: if an error occurs, redirect to tip with a
70 71 proper message
71 72
72 73 :param rev: revision to fetch
73 74 :param silent_empty: return None if repository is empty
74 75 """
75 76
76 77 try:
77 78 return c.db_repo_scm_instance.get_changeset(rev)
78 79 except EmptyRepositoryError as e:
79 80 if silent_empty:
80 81 return None
81 82 url_ = url('files_add_home',
82 83 repo_name=c.repo_name,
83 84 revision=0, f_path='', anchor='edit')
84 85 add_new = h.link_to(_('Click here to add new file'), url_, class_="alert-link")
85 86 h.flash(_('There are no files yet.') + ' ' + add_new, category='warning')
86 87 raise HTTPNotFound()
87 88 except (ChangesetDoesNotExistError, LookupError):
88 89 msg = _('Such revision does not exist for this repository')
89 90 h.flash(msg, category='error')
90 91 raise HTTPNotFound()
91 92 except RepositoryError as e:
92 93 h.flash(e, category='error')
93 94 raise HTTPNotFound()
94 95
95 96 def __get_filenode(self, cs, path):
96 97 """
97 98 Returns file_node or raise HTTP error.
98 99
99 100 :param cs: given changeset
100 101 :param path: path to lookup
101 102 """
102 103
103 104 try:
104 105 file_node = cs.get_node(path)
105 106 if file_node.is_dir():
106 107 raise RepositoryError('given path is a directory')
107 108 except ChangesetDoesNotExistError:
108 109 msg = _('Such revision does not exist for this repository')
109 110 h.flash(msg, category='error')
110 111 raise HTTPNotFound()
111 112 except RepositoryError as e:
112 113 h.flash(e, category='error')
113 114 raise HTTPNotFound()
114 115
115 116 return file_node
116 117
117 118 @LoginRequired(allow_default_user=True)
118 119 @HasRepoPermissionLevelDecorator('read')
119 120 def index(self, repo_name, revision, f_path, annotate=False):
120 121 # redirect to given revision from form if given
121 122 post_revision = request.POST.get('at_rev', None)
122 123 if post_revision:
123 124 cs = self.__get_cs(post_revision) # FIXME - unused!
124 125
125 126 c.revision = revision
126 127 c.changeset = self.__get_cs(revision)
127 128 c.branch = request.GET.get('branch', None)
128 129 c.f_path = f_path
129 130 c.annotate = annotate
130 131 cur_rev = c.changeset.revision
131 132 # used in files_source.html:
132 133 c.cut_off_limit = self.cut_off_limit
133 134 c.fulldiff = request.GET.get('fulldiff')
134 135
135 136 # prev link
136 137 try:
137 138 prev_rev = c.db_repo_scm_instance.get_changeset(cur_rev).prev(c.branch)
138 139 c.url_prev = url('files_home', repo_name=c.repo_name,
139 140 revision=prev_rev.raw_id, f_path=f_path)
140 141 if c.branch:
141 142 c.url_prev += '?branch=%s' % c.branch
142 143 except (ChangesetDoesNotExistError, VCSError):
143 144 c.url_prev = '#'
144 145
145 146 # next link
146 147 try:
147 148 next_rev = c.db_repo_scm_instance.get_changeset(cur_rev).next(c.branch)
148 149 c.url_next = url('files_home', repo_name=c.repo_name,
149 150 revision=next_rev.raw_id, f_path=f_path)
150 151 if c.branch:
151 152 c.url_next += '?branch=%s' % c.branch
152 153 except (ChangesetDoesNotExistError, VCSError):
153 154 c.url_next = '#'
154 155
155 156 # files or dirs
156 157 try:
157 158 c.file = c.changeset.get_node(f_path)
158 159
159 160 if c.file.is_submodule():
160 161 raise HTTPFound(location=c.file.url)
161 162 elif c.file.is_file():
162 163 c.load_full_history = False
163 164 # determine if we're on branch head
164 165 _branches = c.db_repo_scm_instance.branches
165 166 c.on_branch_head = revision in _branches or revision in _branches.values()
166 167 _hist = []
167 168 c.file_history = []
168 169 if c.load_full_history:
169 170 c.file_history, _hist = self._get_node_history(c.changeset, f_path)
170 171
171 172 c.authors = []
172 173 for a in set([x.author for x in _hist]):
173 174 c.authors.append((h.email(a), h.person(a)))
174 175 else:
175 176 c.authors = c.file_history = []
176 177 except RepositoryError as e:
177 178 h.flash(e, category='error')
178 179 raise HTTPNotFound()
179 180
180 181 if request.environ.get('HTTP_X_PARTIAL_XHR'):
181 182 return render('files/files_ypjax.html')
182 183
183 184 # TODO: tags and bookmarks?
184 185 c.revision_options = [(c.changeset.raw_id,
185 186 _('%s at %s') % (b, h.short_id(c.changeset.raw_id))) for b in c.changeset.branches] + \
186 187 [(n, b) for b, n in c.db_repo_scm_instance.branches.items()]
187 188 if c.db_repo_scm_instance.closed_branches:
188 189 prefix = _('(closed)') + ' '
189 190 c.revision_options += [('-', '-')] + \
190 191 [(n, prefix + b) for b, n in c.db_repo_scm_instance.closed_branches.items()]
191 192
192 193 return render('files/files.html')
193 194
194 195 @LoginRequired(allow_default_user=True)
195 196 @HasRepoPermissionLevelDecorator('read')
196 197 @jsonify
197 198 def history(self, repo_name, revision, f_path):
198 199 changeset = self.__get_cs(revision)
199 200 _file = changeset.get_node(f_path)
200 201 if _file.is_file():
201 202 file_history, _hist = self._get_node_history(changeset, f_path)
202 203
203 204 res = []
204 205 for obj in file_history:
205 206 res.append({
206 207 'text': obj[1],
207 208 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
208 209 })
209 210
210 211 data = {
211 212 'more': False,
212 213 'results': res
213 214 }
214 215 return data
215 216
216 217 @LoginRequired(allow_default_user=True)
217 218 @HasRepoPermissionLevelDecorator('read')
218 219 def authors(self, repo_name, revision, f_path):
219 220 changeset = self.__get_cs(revision)
220 221 _file = changeset.get_node(f_path)
221 222 if _file.is_file():
222 223 file_history, _hist = self._get_node_history(changeset, f_path)
223 224 c.authors = []
224 225 for a in set([x.author for x in _hist]):
225 226 c.authors.append((h.email(a), h.person(a)))
226 227 return render('files/files_history_box.html')
227 228
228 229 @LoginRequired(allow_default_user=True)
229 230 @HasRepoPermissionLevelDecorator('read')
230 231 def rawfile(self, repo_name, revision, f_path):
231 232 cs = self.__get_cs(revision)
232 233 file_node = self.__get_filenode(cs, f_path)
233 234
234 235 response.content_disposition = \
235 236 'attachment; filename=%s' % f_path.split(db.URL_SEP)[-1]
236 237
237 238 response.content_type = file_node.mimetype
238 239 return file_node.content
239 240
240 241 @LoginRequired(allow_default_user=True)
241 242 @HasRepoPermissionLevelDecorator('read')
242 243 def raw(self, repo_name, revision, f_path):
243 244 cs = self.__get_cs(revision)
244 245 file_node = self.__get_filenode(cs, f_path)
245 246
246 247 raw_mimetype_mapping = {
247 248 # map original mimetype to a mimetype used for "show as raw"
248 249 # you can also provide a content-disposition to override the
249 250 # default "attachment" disposition.
250 251 # orig_type: (new_type, new_dispo)
251 252
252 253 # show images inline:
253 254 'image/x-icon': ('image/x-icon', 'inline'),
254 255 'image/png': ('image/png', 'inline'),
255 256 'image/gif': ('image/gif', 'inline'),
256 257 'image/jpeg': ('image/jpeg', 'inline'),
257 258 'image/svg+xml': ('image/svg+xml', 'inline'),
258 259 }
259 260
260 261 mimetype = file_node.mimetype
261 262 try:
262 263 mimetype, dispo = raw_mimetype_mapping[mimetype]
263 264 except KeyError:
264 265 # we don't know anything special about this, handle it safely
265 266 if file_node.is_binary:
266 267 # do same as download raw for binary files
267 268 mimetype, dispo = 'application/octet-stream', 'attachment'
268 269 else:
269 270 # do not just use the original mimetype, but force text/plain,
270 271 # otherwise it would serve text/html and that might be unsafe.
271 272 # Note: underlying vcs library fakes text/plain mimetype if the
272 273 # mimetype cannot be determined and it thinks it is not
273 274 # binary. This might lead to erroneous text display in some
274 275 # cases, but helps in other cases, like with text files
275 276 # without extension.
276 277 mimetype, dispo = 'text/plain', 'inline'
277 278
278 279 if dispo == 'attachment':
279 280 dispo = 'attachment; filename=%s' % f_path.split(os.sep)[-1]
280 281
281 282 response.content_disposition = dispo
282 283 response.content_type = mimetype
283 284 return file_node.content
284 285
285 286 @LoginRequired()
286 287 @HasRepoPermissionLevelDecorator('write')
287 288 def delete(self, repo_name, revision, f_path):
288 289 repo = c.db_repo
289 290 # check if revision is a branch identifier - basically we cannot
290 291 # create multiple heads via file editing
291 292 _branches = repo.scm_instance.branches
292 293 # check if revision is a branch name or branch hash
293 294 if revision not in _branches and revision not in _branches.values():
294 295 h.flash(_('You can only delete files with revision '
295 296 'being a valid branch'), category='warning')
296 297 raise HTTPFound(location=h.url('files_home',
297 298 repo_name=repo_name, revision='tip',
298 299 f_path=f_path))
299 300
300 301 r_post = request.POST
301 302
302 303 c.cs = self.__get_cs(revision)
303 304 c.file = self.__get_filenode(c.cs, f_path)
304 305
305 306 c.default_message = _('Deleted file %s via Kallithea') % (f_path)
306 307 c.f_path = f_path
307 308 node_path = f_path
308 309 author = request.authuser.full_contact
309 310
310 311 if r_post:
311 312 message = r_post.get('message') or c.default_message
312 313
313 314 try:
314 315 nodes = {
315 316 node_path: {
316 317 'content': ''
317 318 }
318 319 }
319 320 self.scm_model.delete_nodes(
320 321 user=request.authuser.user_id,
321 322 ip_addr=request.ip_addr,
322 323 repo=c.db_repo,
323 324 message=message,
324 325 nodes=nodes,
325 326 parent_cs=c.cs,
326 327 author=author,
327 328 )
328 329
329 330 h.flash(_('Successfully deleted file %s') % f_path,
330 331 category='success')
331 332 except Exception:
332 333 log.error(traceback.format_exc())
333 334 h.flash(_('Error occurred during commit'), category='error')
334 335 raise HTTPFound(location=url('changeset_home',
335 336 repo_name=c.repo_name, revision='tip'))
336 337
337 338 return render('files/files_delete.html')
338 339
339 340 @LoginRequired()
340 341 @HasRepoPermissionLevelDecorator('write')
341 342 def edit(self, repo_name, revision, f_path):
342 343 repo = c.db_repo
343 344 # check if revision is a branch identifier - basically we cannot
344 345 # create multiple heads via file editing
345 346 _branches = repo.scm_instance.branches
346 347 # check if revision is a branch name or branch hash
347 348 if revision not in _branches and revision not in _branches.values():
348 349 h.flash(_('You can only edit files with revision '
349 350 'being a valid branch'), category='warning')
350 351 raise HTTPFound(location=h.url('files_home',
351 352 repo_name=repo_name, revision='tip',
352 353 f_path=f_path))
353 354
354 355 r_post = request.POST
355 356
356 357 c.cs = self.__get_cs(revision)
357 358 c.file = self.__get_filenode(c.cs, f_path)
358 359
359 360 if c.file.is_binary:
360 361 raise HTTPFound(location=url('files_home', repo_name=c.repo_name,
361 362 revision=c.cs.raw_id, f_path=f_path))
362 363 c.default_message = _('Edited file %s via Kallithea') % (f_path)
363 364 c.f_path = f_path
364 365
365 366 if r_post:
366 367 old_content = safe_str(c.file.content)
367 368 sl = old_content.splitlines(1)
368 369 first_line = sl[0] if sl else ''
369 370 # modes: 0 - Unix, 1 - Mac, 2 - DOS
370 371 mode = detect_mode(first_line, 0)
371 372 content = convert_line_endings(r_post.get('content', ''), mode)
372 373
373 374 message = r_post.get('message') or c.default_message
374 375 author = request.authuser.full_contact
375 376
376 377 if content == old_content:
377 378 h.flash(_('No changes'), category='warning')
378 379 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
379 380 revision='tip'))
380 381 try:
381 382 self.scm_model.commit_change(repo=c.db_repo_scm_instance,
382 383 repo_name=repo_name, cs=c.cs,
383 384 user=request.authuser.user_id,
384 385 ip_addr=request.ip_addr,
385 386 author=author, message=message,
386 387 content=content, f_path=f_path)
387 388 h.flash(_('Successfully committed to %s') % f_path,
388 389 category='success')
389 390 except Exception:
390 391 log.error(traceback.format_exc())
391 392 h.flash(_('Error occurred during commit'), category='error')
392 393 raise HTTPFound(location=url('changeset_home',
393 394 repo_name=c.repo_name, revision='tip'))
394 395
395 396 return render('files/files_edit.html')
396 397
397 398 @LoginRequired()
398 399 @HasRepoPermissionLevelDecorator('write')
399 400 def add(self, repo_name, revision, f_path):
400 401
401 402 repo = c.db_repo
402 403 r_post = request.POST
403 404 c.cs = self.__get_cs(revision, silent_empty=True)
404 405 if c.cs is None:
405 406 c.cs = EmptyChangeset(alias=c.db_repo_scm_instance.alias)
406 407 c.default_message = (_('Added file via Kallithea'))
407 408 c.f_path = f_path
408 409
409 410 if r_post:
410 411 unix_mode = 0
411 412 content = convert_line_endings(r_post.get('content', ''), unix_mode)
412 413
413 414 message = r_post.get('message') or c.default_message
414 415 filename = r_post.get('filename')
415 416 location = r_post.get('location', '')
416 417 file_obj = r_post.get('upload_file', None)
417 418
418 419 if file_obj is not None and hasattr(file_obj, 'filename'):
419 420 filename = file_obj.filename
420 421 content = file_obj.file
421 422
422 423 if hasattr(content, 'file'):
423 424 # non posix systems store real file under file attr
424 425 content = content.file
425 426
426 427 if not content:
427 428 h.flash(_('No content'), category='warning')
428 429 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
429 430 revision='tip'))
430 431 if not filename:
431 432 h.flash(_('No filename'), category='warning')
432 433 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
433 434 revision='tip'))
434 435 # strip all crap out of file, just leave the basename
435 436 filename = os.path.basename(filename)
436 437 node_path = posixpath.join(location, filename)
437 438 author = request.authuser.full_contact
438 439
439 440 try:
440 441 nodes = {
441 442 node_path: {
442 443 'content': content
443 444 }
444 445 }
445 446 self.scm_model.create_nodes(
446 447 user=request.authuser.user_id,
447 448 ip_addr=request.ip_addr,
448 449 repo=c.db_repo,
449 450 message=message,
450 451 nodes=nodes,
451 452 parent_cs=c.cs,
452 453 author=author,
453 454 )
454 455
455 456 h.flash(_('Successfully committed to %s') % node_path,
456 457 category='success')
457 458 except NonRelativePathError as e:
458 459 h.flash(_('Location must be relative path and must not '
459 460 'contain .. in path'), category='warning')
460 461 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
461 462 revision='tip'))
462 463 except (NodeError, NodeAlreadyExistsError) as e:
463 464 h.flash(_(e), category='error')
464 465 except Exception:
465 466 log.error(traceback.format_exc())
466 467 h.flash(_('Error occurred during commit'), category='error')
467 468 raise HTTPFound(location=url('changeset_home',
468 469 repo_name=c.repo_name, revision='tip'))
469 470
470 471 return render('files/files_add.html')
471 472
472 473 @LoginRequired(allow_default_user=True)
473 474 @HasRepoPermissionLevelDecorator('read')
474 475 def archivefile(self, repo_name, fname):
475 476 fileformat = None
476 477 revision = None
477 478 ext = None
478 479 subrepos = request.GET.get('subrepos') == 'true'
479 480
480 481 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
481 482 archive_spec = fname.split(ext_data[1])
482 483 if len(archive_spec) == 2 and archive_spec[1] == '':
483 484 fileformat = a_type or ext_data[1]
484 485 revision = archive_spec[0]
485 486 ext = ext_data[1]
486 487
487 488 try:
488 489 dbrepo = RepoModel().get_by_repo_name(repo_name)
489 490 if not dbrepo.enable_downloads:
490 491 return _('Downloads disabled') # TODO: do something else?
491 492
492 493 if c.db_repo_scm_instance.alias == 'hg':
493 494 # patch and reset hooks section of UI config to not run any
494 495 # hooks on fetching archives with subrepos
495 496 for k, v in c.db_repo_scm_instance._repo.ui.configitems('hooks'):
496 497 c.db_repo_scm_instance._repo.ui.setconfig('hooks', k, None)
497 498
498 499 cs = c.db_repo_scm_instance.get_changeset(revision)
499 500 content_type = settings.ARCHIVE_SPECS[fileformat][0]
500 501 except ChangesetDoesNotExistError:
501 502 return _('Unknown revision %s') % revision
502 503 except EmptyRepositoryError:
503 504 return _('Empty repository')
504 505 except (ImproperArchiveTypeError, KeyError):
505 506 return _('Unknown archive type')
506 507
507 from kallithea import CONFIG
508 508 rev_name = cs.raw_id[:12]
509 509 archive_name = '%s-%s%s' % (repo_name.replace('/', '_'), rev_name, ext)
510 510
511 511 archive_path = None
512 512 cached_archive_path = None
513 archive_cache_dir = CONFIG.get('archive_cache_dir')
513 archive_cache_dir = kallithea.CONFIG.get('archive_cache_dir')
514 514 if archive_cache_dir and not subrepos: # TODO: subrepo caching?
515 515 if not os.path.isdir(archive_cache_dir):
516 516 os.makedirs(archive_cache_dir)
517 517 cached_archive_path = os.path.join(archive_cache_dir, archive_name)
518 518 if os.path.isfile(cached_archive_path):
519 519 log.debug('Found cached archive in %s', cached_archive_path)
520 520 archive_path = cached_archive_path
521 521 else:
522 522 log.debug('Archive %s is not yet cached', archive_name)
523 523
524 524 if archive_path is None:
525 525 # generate new archive
526 526 fd, archive_path = tempfile.mkstemp()
527 527 log.debug('Creating new temp archive in %s', archive_path)
528 528 with os.fdopen(fd, 'wb') as stream:
529 529 cs.fill_archive(stream=stream, kind=fileformat, subrepos=subrepos)
530 530 # stream (and thus fd) has been closed by cs.fill_archive
531 531 if cached_archive_path is not None:
532 532 # we generated the archive - move it to cache
533 533 log.debug('Storing new archive in %s', cached_archive_path)
534 534 shutil.move(archive_path, cached_archive_path)
535 535 archive_path = cached_archive_path
536 536
537 537 def get_chunked_archive(archive_path):
538 538 stream = open(archive_path, 'rb')
539 539 while True:
540 540 data = stream.read(16 * 1024)
541 541 if not data:
542 542 break
543 543 yield data
544 544 stream.close()
545 545 if archive_path != cached_archive_path:
546 546 log.debug('Destroying temp archive %s', archive_path)
547 547 os.remove(archive_path)
548 548
549 549 action_logger(user=request.authuser,
550 550 action='user_downloaded_archive:%s' % (archive_name),
551 551 repo=repo_name, ipaddr=request.ip_addr, commit=True)
552 552
553 553 response.content_disposition = str('attachment; filename=%s' % (archive_name))
554 554 response.content_type = str(content_type)
555 555 return get_chunked_archive(archive_path)
556 556
557 557 @LoginRequired(allow_default_user=True)
558 558 @HasRepoPermissionLevelDecorator('read')
559 559 def diff(self, repo_name, f_path):
560 560 ignore_whitespace_diff = h.get_ignore_whitespace_diff(request.GET)
561 561 diff_context_size = h.get_diff_context_size(request.GET)
562 562 diff2 = request.GET.get('diff2', '')
563 563 diff1 = request.GET.get('diff1', '') or diff2
564 564 c.action = request.GET.get('diff')
565 565 c.no_changes = diff1 == diff2
566 566 c.f_path = f_path
567 567 c.big_diff = False
568 568 fulldiff = request.GET.get('fulldiff')
569 569 c.changes = OrderedDict()
570 570 c.changes[diff2] = []
571 571
572 572 # special case if we want a show rev only, it's impl here
573 573 # to reduce JS and callbacks
574 574
575 575 if request.GET.get('show_rev'):
576 576 if asbool(request.GET.get('annotate', 'False')):
577 577 _url = url('files_annotate_home', repo_name=c.repo_name,
578 578 revision=diff1, f_path=c.f_path)
579 579 else:
580 580 _url = url('files_home', repo_name=c.repo_name,
581 581 revision=diff1, f_path=c.f_path)
582 582
583 583 raise HTTPFound(location=_url)
584 584 try:
585 585 if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
586 586 c.changeset_1 = c.db_repo_scm_instance.get_changeset(diff1)
587 587 try:
588 588 node1 = c.changeset_1.get_node(f_path)
589 589 if node1.is_dir():
590 590 raise NodeError('%s path is a %s not a file'
591 591 % (node1, type(node1)))
592 592 except NodeDoesNotExistError:
593 593 c.changeset_1 = EmptyChangeset(cs=diff1,
594 594 revision=c.changeset_1.revision,
595 595 repo=c.db_repo_scm_instance)
596 596 node1 = FileNode(f_path, '', changeset=c.changeset_1)
597 597 else:
598 598 c.changeset_1 = EmptyChangeset(repo=c.db_repo_scm_instance)
599 599 node1 = FileNode(f_path, '', changeset=c.changeset_1)
600 600
601 601 if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
602 602 c.changeset_2 = c.db_repo_scm_instance.get_changeset(diff2)
603 603 try:
604 604 node2 = c.changeset_2.get_node(f_path)
605 605 if node2.is_dir():
606 606 raise NodeError('%s path is a %s not a file'
607 607 % (node2, type(node2)))
608 608 except NodeDoesNotExistError:
609 609 c.changeset_2 = EmptyChangeset(cs=diff2,
610 610 revision=c.changeset_2.revision,
611 611 repo=c.db_repo_scm_instance)
612 612 node2 = FileNode(f_path, '', changeset=c.changeset_2)
613 613 else:
614 614 c.changeset_2 = EmptyChangeset(repo=c.db_repo_scm_instance)
615 615 node2 = FileNode(f_path, '', changeset=c.changeset_2)
616 616 except (RepositoryError, NodeError):
617 617 log.error(traceback.format_exc())
618 618 raise HTTPFound(location=url('files_home', repo_name=c.repo_name,
619 619 f_path=f_path))
620 620
621 621 if c.action == 'download':
622 622 raw_diff = diffs.get_gitdiff(node1, node2,
623 623 ignore_whitespace=ignore_whitespace_diff,
624 624 context=diff_context_size)
625 625 diff_name = '%s_vs_%s.diff' % (diff1, diff2)
626 626 response.content_type = 'text/plain'
627 627 response.content_disposition = (
628 628 'attachment; filename=%s' % diff_name
629 629 )
630 630 return raw_diff
631 631
632 632 elif c.action == 'raw':
633 633 raw_diff = diffs.get_gitdiff(node1, node2,
634 634 ignore_whitespace=ignore_whitespace_diff,
635 635 context=diff_context_size)
636 636 response.content_type = 'text/plain'
637 637 return raw_diff
638 638
639 639 else:
640 640 fid = h.FID(diff2, node2.path)
641 641 diff_limit = None if fulldiff else self.cut_off_limit
642 642 c.a_rev, c.cs_rev, a_path, diff, st, op = diffs.wrapped_diff(filenode_old=node1,
643 643 filenode_new=node2,
644 644 diff_limit=diff_limit,
645 645 ignore_whitespace=ignore_whitespace_diff,
646 646 line_context=diff_context_size,
647 647 enable_comments=False)
648 648 c.file_diff_data = [(fid, fid, op, a_path, node2.path, diff, st)]
649 649 return render('files/file_diff.html')
650 650
651 651 @LoginRequired(allow_default_user=True)
652 652 @HasRepoPermissionLevelDecorator('read')
653 653 def diff_2way(self, repo_name, f_path):
654 654 diff1 = request.GET.get('diff1', '')
655 655 diff2 = request.GET.get('diff2', '')
656 656 try:
657 657 if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
658 658 c.changeset_1 = c.db_repo_scm_instance.get_changeset(diff1)
659 659 try:
660 660 node1 = c.changeset_1.get_node(f_path)
661 661 if node1.is_dir():
662 662 raise NodeError('%s path is a %s not a file'
663 663 % (node1, type(node1)))
664 664 except NodeDoesNotExistError:
665 665 c.changeset_1 = EmptyChangeset(cs=diff1,
666 666 revision=c.changeset_1.revision,
667 667 repo=c.db_repo_scm_instance)
668 668 node1 = FileNode(f_path, '', changeset=c.changeset_1)
669 669 else:
670 670 c.changeset_1 = EmptyChangeset(repo=c.db_repo_scm_instance)
671 671 node1 = FileNode(f_path, '', changeset=c.changeset_1)
672 672
673 673 if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
674 674 c.changeset_2 = c.db_repo_scm_instance.get_changeset(diff2)
675 675 try:
676 676 node2 = c.changeset_2.get_node(f_path)
677 677 if node2.is_dir():
678 678 raise NodeError('%s path is a %s not a file'
679 679 % (node2, type(node2)))
680 680 except NodeDoesNotExistError:
681 681 c.changeset_2 = EmptyChangeset(cs=diff2,
682 682 revision=c.changeset_2.revision,
683 683 repo=c.db_repo_scm_instance)
684 684 node2 = FileNode(f_path, '', changeset=c.changeset_2)
685 685 else:
686 686 c.changeset_2 = EmptyChangeset(repo=c.db_repo_scm_instance)
687 687 node2 = FileNode(f_path, '', changeset=c.changeset_2)
688 688 except ChangesetDoesNotExistError as e:
689 689 msg = _('Such revision does not exist for this repository')
690 690 h.flash(msg, category='error')
691 691 raise HTTPNotFound()
692 692 c.node1 = node1
693 693 c.node2 = node2
694 694 c.cs1 = c.changeset_1
695 695 c.cs2 = c.changeset_2
696 696
697 697 return render('files/diff_2way.html')
698 698
699 699 def _get_node_history(self, cs, f_path, changesets=None):
700 700 """
701 701 get changeset history for the given node
702 702
703 703 :param cs: changeset to calculate history
704 704 :param f_path: path for node to calculate history for
705 705 :param changesets: if passed don't calculate history and take
706 706 changesets defined in this list
707 707 """
708 708 # calculate history based on tip
709 709 tip_cs = c.db_repo_scm_instance.get_changeset()
710 710 if changesets is None:
711 711 try:
712 712 changesets = tip_cs.get_file_history(f_path)
713 713 except (NodeDoesNotExistError, ChangesetError):
714 714 # this node is not present at tip !
715 715 changesets = cs.get_file_history(f_path)
716 716 hist_l = []
717 717
718 718 changesets_group = ([], _("Changesets"))
719 719 branches_group = ([], _("Branches"))
720 720 tags_group = ([], _("Tags"))
721 721 for chs in changesets:
722 722 # TODO: loop over chs.branches ... but that will not give all the bogus None branches for Git ...
723 723 _branch = chs.branch
724 724 n_desc = '%s (%s)' % (h.show_id(chs), _branch)
725 725 changesets_group[0].append((chs.raw_id, n_desc,))
726 726 hist_l.append(changesets_group)
727 727
728 728 for name, chs in c.db_repo_scm_instance.branches.items():
729 729 branches_group[0].append((chs, name),)
730 730 hist_l.append(branches_group)
731 731
732 732 for name, chs in c.db_repo_scm_instance.tags.items():
733 733 tags_group[0].append((chs, name),)
734 734 hist_l.append(tags_group)
735 735
736 736 return hist_l, changesets
737 737
738 738 @LoginRequired(allow_default_user=True)
739 739 @HasRepoPermissionLevelDecorator('read')
740 740 @jsonify
741 741 def nodelist(self, repo_name, revision, f_path):
742 742 if request.environ.get('HTTP_X_PARTIAL_XHR'):
743 743 cs = self.__get_cs(revision)
744 744 _d, _f = ScmModel().get_nodes(repo_name, cs.raw_id, f_path,
745 745 flat=False)
746 746 return {'nodes': _d + _f}
@@ -1,1392 +1,1389 @@
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 Helper functions
16 16
17 17 Consists of functions typically used within templates, but also
18 18 available to Controllers. This module is available to both as 'h'.
19 19 """
20 20 import hashlib
21 21 import json
22 22 import logging
23 23 import random
24 24 import re
25 25 import textwrap
26 26 import urllib.parse
27 27
28 28 from beaker.cache import cache_region
29 29 from pygments import highlight as code_highlight
30 30 from pygments.formatters.html import HtmlFormatter
31 31 from tg.i18n import ugettext as _
32 32 from webhelpers2.html import HTML, escape, literal
33 33 from webhelpers2.html.tags import NotGiven, Option, Options, _input, _make_safe_id_component, checkbox, end_form
34 34 from webhelpers2.html.tags import form as insecure_form
35 35 from webhelpers2.html.tags import hidden, link_to, password, radio
36 36 from webhelpers2.html.tags import select as webhelpers2_select
37 37 from webhelpers2.html.tags import submit, text, textarea
38 38 from webhelpers2.number import format_byte_size
39 39 from webhelpers2.text import chop_at, truncate, wrap_paragraphs
40 40
41 import kallithea
41 42 from kallithea.config.routing import url
42 43 from kallithea.lib.annotate import annotate_highlight
43 44 #==============================================================================
44 45 # PERMS
45 46 #==============================================================================
46 47 from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel
47 48 from kallithea.lib.markup_renderer import url_re
48 49 from kallithea.lib.pygmentsutils import get_custom_lexer
49 50 from kallithea.lib.utils2 import MENTIONS_REGEX, AttributeDict
50 51 from kallithea.lib.utils2 import age as _age
51 52 from kallithea.lib.utils2 import asbool, credentials_filter, safe_bytes, safe_int, safe_str, time_to_datetime
52 53 from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
53 54 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError
54 55 #==============================================================================
55 56 # SCM FILTERS available via h.
56 57 #==============================================================================
57 58 from kallithea.lib.vcs.utils import author_email, author_name
58 59
59 60
60 61 # mute pyflakes "imported but unused"
61 62 assert Option
62 63 assert checkbox
63 64 assert end_form
64 65 assert password
65 66 assert radio
66 67 assert submit
67 68 assert text
68 69 assert textarea
69 70 assert format_byte_size
70 71 assert chop_at
71 72 assert wrap_paragraphs
72 73 assert HasPermissionAny
73 74 assert HasRepoGroupPermissionLevel
74 75 assert HasRepoPermissionLevel
75 76 assert time_to_datetime
76 77 assert EmptyChangeset
77 78
78 79
79 80 log = logging.getLogger(__name__)
80 81
81 82
82 83 def canonical_url(*args, **kargs):
83 84 '''Like url(x, qualified=True), but returns url that not only is qualified
84 85 but also canonical, as configured in canonical_url'''
85 from kallithea import CONFIG
86 86 try:
87 parts = CONFIG.get('canonical_url', '').split('://', 1)
87 parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1)
88 88 kargs['host'] = parts[1]
89 89 kargs['protocol'] = parts[0]
90 90 except IndexError:
91 91 kargs['qualified'] = True
92 92 return url(*args, **kargs)
93 93
94 94
95 95 def canonical_hostname():
96 96 '''Return canonical hostname of system'''
97 from kallithea import CONFIG
98 97 try:
99 parts = CONFIG.get('canonical_url', '').split('://', 1)
98 parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1)
100 99 return parts[1].split('/', 1)[0]
101 100 except IndexError:
102 101 parts = url('home', qualified=True).split('://', 1)
103 102 return parts[1].split('/', 1)[0]
104 103
105 104
106 105 def html_escape(s):
107 106 """Return string with all html escaped.
108 107 This is also safe for javascript in html but not necessarily correct.
109 108 """
110 109 return (s
111 110 .replace('&', '&amp;')
112 111 .replace(">", "&gt;")
113 112 .replace("<", "&lt;")
114 113 .replace('"', "&quot;")
115 114 .replace("'", "&apos;") # Note: this is HTML5 not HTML4 and might not work in mails
116 115 )
117 116
118 117 def js(value):
119 118 """Convert Python value to the corresponding JavaScript representation.
120 119
121 120 This is necessary to safely insert arbitrary values into HTML <script>
122 121 sections e.g. using Mako template expression substitution.
123 122
124 123 Note: Rather than using this function, it's preferable to avoid the
125 124 insertion of values into HTML <script> sections altogether. Instead,
126 125 data should (to the extent possible) be passed to JavaScript using
127 126 data attributes or AJAX calls, eliminating the need for JS specific
128 127 escaping.
129 128
130 129 Note: This is not safe for use in attributes (e.g. onclick), because
131 130 quotes are not escaped.
132 131
133 132 Because the rules for parsing <script> varies between XHTML (where
134 133 normal rules apply for any special characters) and HTML (where
135 134 entities are not interpreted, but the literal string "</script>"
136 135 is forbidden), the function ensures that the result never contains
137 136 '&', '<' and '>', thus making it safe in both those contexts (but
138 137 not in attributes).
139 138 """
140 139 return literal(
141 140 ('(' + json.dumps(value) + ')')
142 141 # In JSON, the following can only appear in string literals.
143 142 .replace('&', r'\x26')
144 143 .replace('<', r'\x3c')
145 144 .replace('>', r'\x3e')
146 145 )
147 146
148 147
149 148 def jshtml(val):
150 149 """HTML escapes a string value, then converts the resulting string
151 150 to its corresponding JavaScript representation (see `js`).
152 151
153 152 This is used when a plain-text string (possibly containing special
154 153 HTML characters) will be used by a script in an HTML context (e.g.
155 154 element.innerHTML or jQuery's 'html' method).
156 155
157 156 If in doubt, err on the side of using `jshtml` over `js`, since it's
158 157 better to escape too much than too little.
159 158 """
160 159 return js(escape(val))
161 160
162 161
163 162 def shorter(s, size=20, firstline=False, postfix='...'):
164 163 """Truncate s to size, including the postfix string if truncating.
165 164 If firstline, truncate at newline.
166 165 """
167 166 if firstline:
168 167 s = s.split('\n', 1)[0].rstrip()
169 168 if len(s) > size:
170 169 return s[:size - len(postfix)] + postfix
171 170 return s
172 171
173 172
174 173 def reset(name, value, id=NotGiven, **attrs):
175 174 """Create a reset button, similar to webhelpers2.html.tags.submit ."""
176 175 return _input("reset", name, value, id, attrs)
177 176
178 177
179 178 def select(name, selected_values, options, id=NotGiven, **attrs):
180 179 """Convenient wrapper of webhelpers2 to let it accept options as a tuple list"""
181 180 if isinstance(options, list):
182 181 option_list = options
183 182 # Handle old value,label lists ... where value also can be value,label lists
184 183 options = Options()
185 184 for x in option_list:
186 185 if isinstance(x, tuple) and len(x) == 2:
187 186 value, label = x
188 187 elif isinstance(x, str):
189 188 value = label = x
190 189 else:
191 190 log.error('invalid select option %r', x)
192 191 raise
193 192 if isinstance(value, list):
194 193 og = options.add_optgroup(label)
195 194 for x in value:
196 195 if isinstance(x, tuple) and len(x) == 2:
197 196 group_value, group_label = x
198 197 elif isinstance(x, str):
199 198 group_value = group_label = x
200 199 else:
201 200 log.error('invalid select option %r', x)
202 201 raise
203 202 og.add_option(group_label, group_value)
204 203 else:
205 204 options.add_option(label, value)
206 205 return webhelpers2_select(name, selected_values, options, id=id, **attrs)
207 206
208 207
209 208 safeid = _make_safe_id_component
210 209
211 210
212 211 def FID(raw_id, path):
213 212 """
214 213 Creates a unique ID for a filenode based on a hash of its path and revision;
215 214 it's safe to use in URLs
216 215 """
217 216 return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12])
218 217
219 218
220 219 def get_ignore_whitespace_diff(GET):
221 220 """Return true if URL requested whitespace to be ignored"""
222 221 return bool(GET.get('ignorews'))
223 222
224 223
225 224 def ignore_whitespace_link(GET, anchor=None):
226 225 """Return snippet with link to current URL with whitespace ignoring toggled"""
227 226 params = dict(GET) # ignoring duplicates
228 227 if get_ignore_whitespace_diff(GET):
229 228 params.pop('ignorews')
230 229 title = _("Show whitespace changes")
231 230 else:
232 231 params['ignorews'] = '1'
233 232 title = _("Ignore whitespace changes")
234 233 params['anchor'] = anchor
235 234 return link_to(
236 235 literal('<i class="icon-strike"></i>'),
237 236 url.current(**params),
238 237 title=title,
239 238 **{'data-toggle': 'tooltip'})
240 239
241 240
242 241 def get_diff_context_size(GET):
243 242 """Return effective context size requested in URL"""
244 243 return safe_int(GET.get('context'), default=3)
245 244
246 245
247 246 def increase_context_link(GET, anchor=None):
248 247 """Return snippet with link to current URL with double context size"""
249 248 context = get_diff_context_size(GET) * 2
250 249 params = dict(GET) # ignoring duplicates
251 250 params['context'] = str(context)
252 251 params['anchor'] = anchor
253 252 return link_to(
254 253 literal('<i class="icon-sort"></i>'),
255 254 url.current(**params),
256 255 title=_('Increase diff context to %(num)s lines') % {'num': context},
257 256 **{'data-toggle': 'tooltip'})
258 257
259 258
260 259 class _FilesBreadCrumbs(object):
261 260
262 261 def __call__(self, repo_name, rev, paths):
263 262 url_l = [link_to(repo_name, url('files_home',
264 263 repo_name=repo_name,
265 264 revision=rev, f_path=''),
266 265 class_='ypjax-link')]
267 266 paths_l = paths.split('/')
268 267 for cnt, p in enumerate(paths_l):
269 268 if p != '':
270 269 url_l.append(link_to(p,
271 270 url('files_home',
272 271 repo_name=repo_name,
273 272 revision=rev,
274 273 f_path='/'.join(paths_l[:cnt + 1])
275 274 ),
276 275 class_='ypjax-link'
277 276 )
278 277 )
279 278
280 279 return literal('/'.join(url_l))
281 280
282 281
283 282 files_breadcrumbs = _FilesBreadCrumbs()
284 283
285 284
286 285 class CodeHtmlFormatter(HtmlFormatter):
287 286 """
288 287 Custom HTML formatter for source code
289 288 """
290 289
291 290 def wrap(self, source, outfile):
292 291 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
293 292
294 293 def _wrap_code(self, source):
295 294 for cnt, it in enumerate(source):
296 295 i, t = it
297 296 t = '<span id="L%s">%s</span>' % (cnt + 1, t)
298 297 yield i, t
299 298
300 299 def _wrap_tablelinenos(self, inner):
301 300 inner_lines = []
302 301 lncount = 0
303 302 for t, line in inner:
304 303 if t:
305 304 lncount += 1
306 305 inner_lines.append(line)
307 306
308 307 fl = self.linenostart
309 308 mw = len(str(lncount + fl - 1))
310 309 sp = self.linenospecial
311 310 st = self.linenostep
312 311 la = self.lineanchors
313 312 aln = self.anchorlinenos
314 313 nocls = self.noclasses
315 314 if sp:
316 315 lines = []
317 316
318 317 for i in range(fl, fl + lncount):
319 318 if i % st == 0:
320 319 if i % sp == 0:
321 320 if aln:
322 321 lines.append('<a href="#%s%d" class="special">%*d</a>' %
323 322 (la, i, mw, i))
324 323 else:
325 324 lines.append('<span class="special">%*d</span>' % (mw, i))
326 325 else:
327 326 if aln:
328 327 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
329 328 else:
330 329 lines.append('%*d' % (mw, i))
331 330 else:
332 331 lines.append('')
333 332 ls = '\n'.join(lines)
334 333 else:
335 334 lines = []
336 335 for i in range(fl, fl + lncount):
337 336 if i % st == 0:
338 337 if aln:
339 338 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
340 339 else:
341 340 lines.append('%*d' % (mw, i))
342 341 else:
343 342 lines.append('')
344 343 ls = '\n'.join(lines)
345 344
346 345 # in case you wonder about the seemingly redundant <div> here: since the
347 346 # content in the other cell also is wrapped in a div, some browsers in
348 347 # some configurations seem to mess up the formatting...
349 348 if nocls:
350 349 yield 0, ('<table class="%stable">' % self.cssclass +
351 350 '<tr><td><div class="linenodiv">'
352 351 '<pre>' + ls + '</pre></div></td>'
353 352 '<td id="hlcode" class="code">')
354 353 else:
355 354 yield 0, ('<table class="%stable">' % self.cssclass +
356 355 '<tr><td class="linenos"><div class="linenodiv">'
357 356 '<pre>' + ls + '</pre></div></td>'
358 357 '<td id="hlcode" class="code">')
359 358 yield 0, ''.join(inner_lines)
360 359 yield 0, '</td></tr></table>'
361 360
362 361
363 362 _whitespace_re = re.compile(r'(\t)|( )(?=\n|</div>)')
364 363
365 364
366 365 def _markup_whitespace(m):
367 366 groups = m.groups()
368 367 if groups[0]:
369 368 return '<u>\t</u>'
370 369 if groups[1]:
371 370 return ' <i></i>'
372 371
373 372
374 373 def markup_whitespace(s):
375 374 return _whitespace_re.sub(_markup_whitespace, s)
376 375
377 376
378 377 def pygmentize(filenode, **kwargs):
379 378 """
380 379 pygmentize function using pygments
381 380
382 381 :param filenode:
383 382 """
384 383 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
385 384 return literal(markup_whitespace(
386 385 code_highlight(safe_str(filenode.content), lexer, CodeHtmlFormatter(**kwargs))))
387 386
388 387
389 388 def hsv_to_rgb(h, s, v):
390 389 if s == 0.0:
391 390 return v, v, v
392 391 i = int(h * 6.0) # XXX assume int() truncates!
393 392 f = (h * 6.0) - i
394 393 p = v * (1.0 - s)
395 394 q = v * (1.0 - s * f)
396 395 t = v * (1.0 - s * (1.0 - f))
397 396 i = i % 6
398 397 if i == 0:
399 398 return v, t, p
400 399 if i == 1:
401 400 return q, v, p
402 401 if i == 2:
403 402 return p, v, t
404 403 if i == 3:
405 404 return p, q, v
406 405 if i == 4:
407 406 return t, p, v
408 407 if i == 5:
409 408 return v, p, q
410 409
411 410
412 411 def gen_color(n=10000):
413 412 """generator for getting n of evenly distributed colors using
414 413 hsv color and golden ratio. It always return same order of colors
415 414
416 415 :returns: RGB tuple
417 416 """
418 417
419 418 golden_ratio = 0.618033988749895
420 419 h = 0.22717784590367374
421 420
422 421 for _unused in range(n):
423 422 h += golden_ratio
424 423 h %= 1
425 424 HSV_tuple = [h, 0.95, 0.95]
426 425 RGB_tuple = hsv_to_rgb(*HSV_tuple)
427 426 yield [str(int(x * 256)) for x in RGB_tuple]
428 427
429 428
430 429 def pygmentize_annotation(repo_name, filenode, **kwargs):
431 430 """
432 431 pygmentize function for annotation
433 432
434 433 :param filenode:
435 434 """
436 435 cgenerator = gen_color()
437 436 color_dict = {}
438 437
439 438 def get_color_string(cs):
440 439 if cs in color_dict:
441 440 col = color_dict[cs]
442 441 else:
443 442 col = color_dict[cs] = next(cgenerator)
444 443 return "color: rgb(%s)! important;" % (', '.join(col))
445 444
446 445 def url_func(changeset):
447 446 author = escape(changeset.author)
448 447 date = changeset.date
449 448 message = escape(changeset.message)
450 449 tooltip_html = ("<b>Author:</b> %s<br/>"
451 450 "<b>Date:</b> %s</b><br/>"
452 451 "<b>Message:</b> %s") % (author, date, message)
453 452
454 453 lnk_format = show_id(changeset)
455 454 uri = link_to(
456 455 lnk_format,
457 456 url('changeset_home', repo_name=repo_name,
458 457 revision=changeset.raw_id),
459 458 style=get_color_string(changeset.raw_id),
460 459 **{'data-toggle': 'popover',
461 460 'data-content': tooltip_html}
462 461 )
463 462
464 463 uri += '\n'
465 464 return uri
466 465
467 466 return literal(markup_whitespace(annotate_highlight(filenode, url_func, **kwargs)))
468 467
469 468
470 469 class _Message(object):
471 470 """A message returned by ``pop_flash_messages()``.
472 471
473 472 Converting the message to a string returns the message text. Instances
474 473 also have the following attributes:
475 474
476 475 * ``category``: the category specified when the message was created.
477 476 * ``message``: the html-safe message text.
478 477 """
479 478
480 479 def __init__(self, category, message):
481 480 self.category = category
482 481 self.message = message
483 482
484 483
485 484 def _session_flash_messages(append=None, clear=False):
486 485 """Manage a message queue in tg.session: return the current message queue
487 486 after appending the given message, and possibly clearing the queue."""
488 487 key = 'flash'
489 488 from tg import session
490 489 if key in session:
491 490 flash_messages = session[key]
492 491 else:
493 492 if append is None: # common fast path - also used for clearing empty queue
494 493 return [] # don't bother saving
495 494 flash_messages = []
496 495 session[key] = flash_messages
497 496 if append is not None and append not in flash_messages:
498 497 flash_messages.append(append)
499 498 if clear:
500 499 session.pop(key, None)
501 500 session.save()
502 501 return flash_messages
503 502
504 503
505 504 def flash(message, category, logf=None):
506 505 """
507 506 Show a message to the user _and_ log it through the specified function
508 507
509 508 category: notice (default), warning, error, success
510 509 logf: a custom log function - such as log.debug
511 510
512 511 logf defaults to log.info, unless category equals 'success', in which
513 512 case logf defaults to log.debug.
514 513 """
515 514 assert category in ('error', 'success', 'warning'), category
516 515 if hasattr(message, '__html__'):
517 516 # render to HTML for storing in cookie
518 517 safe_message = str(message)
519 518 else:
520 519 # Apply str - the message might be an exception with __str__
521 520 # Escape, so we can trust the result without further escaping, without any risk of injection
522 521 safe_message = html_escape(str(message))
523 522 if logf is None:
524 523 logf = log.info
525 524 if category == 'success':
526 525 logf = log.debug
527 526
528 527 logf('Flash %s: %s', category, safe_message)
529 528
530 529 _session_flash_messages(append=(category, safe_message))
531 530
532 531
533 532 def pop_flash_messages():
534 533 """Return all accumulated messages and delete them from the session.
535 534
536 535 The return value is a list of ``Message`` objects.
537 536 """
538 537 return [_Message(category, message) for category, message in _session_flash_messages(clear=True)]
539 538
540 539
541 540 def age(x, y=False):
542 541 return _age(x, y)
543 542
544 543 def capitalize(x):
545 544 return x.capitalize()
546 545
547 546 email = author_email
548 547
549 548 def short_id(x):
550 549 return x[:12]
551 550
552 551 def hide_credentials(x):
553 552 return ''.join(credentials_filter(x))
554 553
555 554
556 555 def show_id(cs):
557 556 """
558 557 Configurable function that shows ID
559 558 by default it's r123:fffeeefffeee
560 559
561 560 :param cs: changeset instance
562 561 """
563 from kallithea import CONFIG
564 def_len = safe_int(CONFIG.get('show_sha_length', 12))
565 show_rev = asbool(CONFIG.get('show_revision_number', False))
562 def_len = safe_int(kallithea.CONFIG.get('show_sha_length', 12))
563 show_rev = asbool(kallithea.CONFIG.get('show_revision_number', False))
566 564
567 565 raw_id = cs.raw_id[:def_len]
568 566 if show_rev:
569 567 return 'r%s:%s' % (cs.revision, raw_id)
570 568 else:
571 569 return raw_id
572 570
573 571
574 572 def fmt_date(date):
575 573 if date:
576 574 return date.strftime("%Y-%m-%d %H:%M:%S")
577 575 return ""
578 576
579 577
580 578 def is_git(repository):
581 579 if hasattr(repository, 'alias'):
582 580 _type = repository.alias
583 581 elif hasattr(repository, 'repo_type'):
584 582 _type = repository.repo_type
585 583 else:
586 584 _type = repository
587 585 return _type == 'git'
588 586
589 587
590 588 def is_hg(repository):
591 589 if hasattr(repository, 'alias'):
592 590 _type = repository.alias
593 591 elif hasattr(repository, 'repo_type'):
594 592 _type = repository.repo_type
595 593 else:
596 594 _type = repository
597 595 return _type == 'hg'
598 596
599 597
600 598 @cache_region('long_term', 'user_attr_or_none')
601 599 def user_attr_or_none(author, show_attr):
602 600 """Try to match email part of VCS committer string with a local user and return show_attr
603 601 - or return None if user not found"""
604 602 email = author_email(author)
605 603 if email:
606 604 from kallithea.model.db import User
607 605 user = User.get_by_email(email)
608 606 if user is not None:
609 607 return getattr(user, show_attr)
610 608 return None
611 609
612 610
613 611 def email_or_none(author):
614 612 """Try to match email part of VCS committer string with a local user.
615 613 Return primary email of user, email part of the specified author name, or None."""
616 614 if not author:
617 615 return None
618 616 email = user_attr_or_none(author, 'email')
619 617 if email is not None:
620 618 return email # always use user's main email address - not necessarily the one used to find user
621 619
622 620 # extract email from the commit string
623 621 email = author_email(author)
624 622 if email:
625 623 return email
626 624
627 625 # No valid email, not a valid user in the system, none!
628 626 return None
629 627
630 628
631 629 def person(author, show_attr="username"):
632 630 """Find the user identified by 'author', return one of the users attributes,
633 631 default to the username attribute, None if there is no user"""
634 632 from kallithea.model.db import User
635 633
636 634 # if author is already an instance use it for extraction
637 635 if isinstance(author, User):
638 636 return getattr(author, show_attr)
639 637
640 638 value = user_attr_or_none(author, show_attr)
641 639 if value is not None:
642 640 return value
643 641
644 642 # Still nothing? Just pass back the author name if any, else the email
645 643 return author_name(author) or email(author)
646 644
647 645
648 646 def person_by_id(id_, show_attr="username"):
649 647 from kallithea.model.db import User
650 648
651 649 # maybe it's an ID ?
652 650 if str(id_).isdigit() or isinstance(id_, int):
653 651 id_ = int(id_)
654 652 user = User.get(id_)
655 653 if user is not None:
656 654 return getattr(user, show_attr)
657 655 return id_
658 656
659 657
660 658 def boolicon(value):
661 659 """Returns boolean value of a value, represented as small html image of true/false
662 660 icons
663 661
664 662 :param value: value
665 663 """
666 664
667 665 if value:
668 666 return HTML.tag('i', class_="icon-ok")
669 667 else:
670 668 return HTML.tag('i', class_="icon-minus-circled")
671 669
672 670
673 671 def action_parser(user_log, feed=False, parse_cs=False):
674 672 """
675 673 This helper maps the specified action string, via action_map, into translated
676 674 fancy names with icons and links
677 675
678 676 :param user_log: user log instance
679 677 :param feed: use output for feeds (no html and fancy icons)
680 678 :param parse_cs: parse Changesets into VCS instances
681 679 """
682 680
683 681 action = user_log.action
684 682 action_params = ' '
685 683
686 684 x = action.split(':')
687 685
688 686 if len(x) > 1:
689 687 action, action_params = x
690 688
691 689 def get_cs_links():
692 690 revs_limit = 3 # display this amount always
693 691 revs_top_limit = 50 # show up to this many additional changesets, hidden by default
694 692 revs_ids = action_params.split(',')
695 693 deleted = user_log.repository is None
696 694 if deleted:
697 695 return ','.join(revs_ids)
698 696
699 697 repo_name = user_log.repository.repo_name
700 698
701 699 def lnk(rev, repo_name):
702 700 lazy_cs = False
703 701 title_ = None
704 702 url_ = '#'
705 703 if isinstance(rev, BaseChangeset) or isinstance(rev, AttributeDict):
706 704 if rev.op and rev.ref_name:
707 705 if rev.op == 'delete_branch':
708 706 lbl = _('Deleted branch: %s') % rev.ref_name
709 707 elif rev.op == 'tag':
710 708 lbl = _('Created tag: %s') % rev.ref_name
711 709 else:
712 710 lbl = 'Unknown operation %s' % rev.op
713 711 else:
714 712 lazy_cs = True
715 713 lbl = rev.short_id[:8]
716 714 url_ = url('changeset_home', repo_name=repo_name,
717 715 revision=rev.raw_id)
718 716 else:
719 717 # changeset cannot be found - it might have been stripped or removed
720 718 lbl = rev[:12]
721 719 title_ = _('Changeset %s not found') % lbl
722 720 if parse_cs:
723 721 return link_to(lbl, url_, title=title_, **{'data-toggle': 'tooltip'})
724 722 return link_to(lbl, url_, class_='lazy-cs' if lazy_cs else '',
725 723 **{'data-raw_id': rev.raw_id, 'data-repo_name': repo_name})
726 724
727 725 def _get_op(rev_txt):
728 726 _op = None
729 727 _name = rev_txt
730 728 if len(rev_txt.split('=>')) == 2:
731 729 _op, _name = rev_txt.split('=>')
732 730 return _op, _name
733 731
734 732 revs = []
735 733 if len([v for v in revs_ids if v != '']) > 0:
736 734 repo = None
737 735 for rev in revs_ids[:revs_top_limit]:
738 736 _op, _name = _get_op(rev)
739 737
740 738 # we want parsed changesets, or new log store format is bad
741 739 if parse_cs:
742 740 try:
743 741 if repo is None:
744 742 repo = user_log.repository.scm_instance
745 743 _rev = repo.get_changeset(rev)
746 744 revs.append(_rev)
747 745 except ChangesetDoesNotExistError:
748 746 log.error('cannot find revision %s in this repo', rev)
749 747 revs.append(rev)
750 748 else:
751 749 _rev = AttributeDict({
752 750 'short_id': rev[:12],
753 751 'raw_id': rev,
754 752 'message': '',
755 753 'op': _op,
756 754 'ref_name': _name
757 755 })
758 756 revs.append(_rev)
759 757 cs_links = [" " + ', '.join(
760 758 [lnk(rev, repo_name) for rev in revs[:revs_limit]]
761 759 )]
762 760 _op1, _name1 = _get_op(revs_ids[0])
763 761 _op2, _name2 = _get_op(revs_ids[-1])
764 762
765 763 _rev = '%s...%s' % (_name1, _name2)
766 764
767 765 compare_view = (
768 766 ' <div class="compare_view" data-toggle="tooltip" title="%s">'
769 767 '<a href="%s">%s</a> </div>' % (
770 768 _('Show all combined changesets %s->%s') % (
771 769 revs_ids[0][:12], revs_ids[-1][:12]
772 770 ),
773 771 url('changeset_home', repo_name=repo_name,
774 772 revision=_rev
775 773 ),
776 774 _('Compare view')
777 775 )
778 776 )
779 777
780 778 # if we have exactly one more than normally displayed
781 779 # just display it, takes less space than displaying
782 780 # "and 1 more revisions"
783 781 if len(revs_ids) == revs_limit + 1:
784 782 cs_links.append(", " + lnk(revs[revs_limit], repo_name))
785 783
786 784 # hidden-by-default ones
787 785 if len(revs_ids) > revs_limit + 1:
788 786 uniq_id = revs_ids[0]
789 787 html_tmpl = (
790 788 '<span> %s <a class="show_more" id="_%s" '
791 789 'href="#more">%s</a> %s</span>'
792 790 )
793 791 if not feed:
794 792 cs_links.append(html_tmpl % (
795 793 _('and'),
796 794 uniq_id, _('%s more') % (len(revs_ids) - revs_limit),
797 795 _('revisions')
798 796 )
799 797 )
800 798
801 799 if not feed:
802 800 html_tmpl = '<span id="%s" style="display:none">, %s </span>'
803 801 else:
804 802 html_tmpl = '<span id="%s"> %s </span>'
805 803
806 804 morelinks = ', '.join(
807 805 [lnk(rev, repo_name) for rev in revs[revs_limit:]]
808 806 )
809 807
810 808 if len(revs_ids) > revs_top_limit:
811 809 morelinks += ', ...'
812 810
813 811 cs_links.append(html_tmpl % (uniq_id, morelinks))
814 812 if len(revs) > 1:
815 813 cs_links.append(compare_view)
816 814 return ''.join(cs_links)
817 815
818 816 def get_fork_name():
819 817 repo_name = action_params
820 818 url_ = url('summary_home', repo_name=repo_name)
821 819 return _('Fork name %s') % link_to(action_params, url_)
822 820
823 821 def get_user_name():
824 822 user_name = action_params
825 823 return user_name
826 824
827 825 def get_users_group():
828 826 group_name = action_params
829 827 return group_name
830 828
831 829 def get_pull_request():
832 830 from kallithea.model.db import PullRequest
833 831 pull_request_id = action_params
834 832 nice_id = PullRequest.make_nice_id(pull_request_id)
835 833
836 834 deleted = user_log.repository is None
837 835 if deleted:
838 836 repo_name = user_log.repository_name
839 837 else:
840 838 repo_name = user_log.repository.repo_name
841 839
842 840 return link_to(_('Pull request %s') % nice_id,
843 841 url('pullrequest_show', repo_name=repo_name,
844 842 pull_request_id=pull_request_id))
845 843
846 844 def get_archive_name():
847 845 archive_name = action_params
848 846 return archive_name
849 847
850 848 # action : translated str, callback(extractor), icon
851 849 action_map = {
852 850 'user_deleted_repo': (_('[deleted] repository'),
853 851 None, 'icon-trashcan'),
854 852 'user_created_repo': (_('[created] repository'),
855 853 None, 'icon-plus'),
856 854 'user_created_fork': (_('[created] repository as fork'),
857 855 None, 'icon-fork'),
858 856 'user_forked_repo': (_('[forked] repository'),
859 857 get_fork_name, 'icon-fork'),
860 858 'user_updated_repo': (_('[updated] repository'),
861 859 None, 'icon-pencil'),
862 860 'user_downloaded_archive': (_('[downloaded] archive from repository'),
863 861 get_archive_name, 'icon-download-cloud'),
864 862 'admin_deleted_repo': (_('[delete] repository'),
865 863 None, 'icon-trashcan'),
866 864 'admin_created_repo': (_('[created] repository'),
867 865 None, 'icon-plus'),
868 866 'admin_forked_repo': (_('[forked] repository'),
869 867 None, 'icon-fork'),
870 868 'admin_updated_repo': (_('[updated] repository'),
871 869 None, 'icon-pencil'),
872 870 'admin_created_user': (_('[created] user'),
873 871 get_user_name, 'icon-user'),
874 872 'admin_updated_user': (_('[updated] user'),
875 873 get_user_name, 'icon-user'),
876 874 'admin_created_users_group': (_('[created] user group'),
877 875 get_users_group, 'icon-pencil'),
878 876 'admin_updated_users_group': (_('[updated] user group'),
879 877 get_users_group, 'icon-pencil'),
880 878 'user_commented_revision': (_('[commented] on revision in repository'),
881 879 get_cs_links, 'icon-comment'),
882 880 'user_commented_pull_request': (_('[commented] on pull request for'),
883 881 get_pull_request, 'icon-comment'),
884 882 'user_closed_pull_request': (_('[closed] pull request for'),
885 883 get_pull_request, 'icon-ok'),
886 884 'push': (_('[pushed] into'),
887 885 get_cs_links, 'icon-move-up'),
888 886 'push_local': (_('[committed via Kallithea] into repository'),
889 887 get_cs_links, 'icon-pencil'),
890 888 'push_remote': (_('[pulled from remote] into repository'),
891 889 get_cs_links, 'icon-move-up'),
892 890 'pull': (_('[pulled] from'),
893 891 None, 'icon-move-down'),
894 892 'started_following_repo': (_('[started following] repository'),
895 893 None, 'icon-heart'),
896 894 'stopped_following_repo': (_('[stopped following] repository'),
897 895 None, 'icon-heart-empty'),
898 896 }
899 897
900 898 action_str = action_map.get(action, action)
901 899 if feed:
902 900 action = action_str[0].replace('[', '').replace(']', '')
903 901 else:
904 902 action = action_str[0] \
905 903 .replace('[', '<b>') \
906 904 .replace(']', '</b>')
907 905
908 906 action_params_func = action_str[1] if callable(action_str[1]) else (lambda: "")
909 907
910 908 def action_parser_icon():
911 909 action = user_log.action
912 910 action_params = None
913 911 x = action.split(':')
914 912
915 913 if len(x) > 1:
916 914 action, action_params = x
917 915
918 916 ico = action_map.get(action, ['', '', ''])[2]
919 917 html = """<i class="%s"></i>""" % ico
920 918 return literal(html)
921 919
922 920 # return callbacks - the caller needs to invoke them to get the rendered parts
923 921 return [lambda: literal(action), action_params_func, action_parser_icon]
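# Editor's usage sketch (an assumption about the calling templates, not part of this
# changeset): the three returned callbacks are meant to be invoked lazily by the
# caller, typically icon first, then the action label, then the parameters.
def _render_action_sketch(user_log):
    action_cb, params_cb, icon_cb = action_parser(user_log)
    # each callback returns already-escaped markup, so the result can be marked safe
    return literal('%s %s %s' % (icon_cb(), action_cb(), params_cb()))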
924 922
925 923
926 924 #==============================================================================
927 925 # GRAVATAR URL
928 926 #==============================================================================
929 927 def gravatar_div(email_address, cls='', size=30, **div_attributes):
930 928 """Return an html literal with a span around a gravatar if they are enabled.
931 929 Extra keyword parameters starting with 'div_' will get the prefix removed
932 930 and '_' changed to '-' and be used as attributes on the div. The default
933 931 class is 'gravatar'.
934 932 """
935 933 from tg import tmpl_context as c
936 934 if not c.visual.use_gravatar:
937 935 return ''
938 936 if 'div_class' not in div_attributes:
939 937 div_attributes['div_class'] = "gravatar"
940 938 attributes = []
941 939 for k, v in sorted(div_attributes.items()):
942 940 assert k.startswith('div_'), k
943 941 attributes.append(' %s="%s"' % (k[4:].replace('_', '-'), escape(v)))
944 942 return literal("""<span%s>%s</span>""" %
945 943 (''.join(attributes),
946 944 gravatar(email_address, cls=cls, size=size)))
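# Editor's example of the 'div_' keyword convention described above (hypothetical
# values, not part of this changeset):
#
#   gravatar_div('user@example.com', size=24, div_data_user='42')
#
# renders roughly:
#
#   <span class="gravatar" data-user="42"><i class="icon-gravatar" ...></i></span>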
947 945
948 946
949 947 def gravatar(email_address, cls='', size=30):
950 948 """return html element of the gravatar
951 949
952 950 This method will return an icon element with the image resolution doubled
953 951 (for retina screens). If the url returned from gravatar_url is
954 952 empty then we fall back to using a generic user icon.
955 953
956 954 """
957 955 from tg import tmpl_context as c
958 956 if not c.visual.use_gravatar:
959 957 return ''
960 958
961 959 src = gravatar_url(email_address, size * 2)
962 960
963 961 if src:
964 962 # here it makes sense to use an inline style (instead of, say, a
965 963 # stylesheet) because we are using it to generate a high-res (retina) size
966 964 html = ('<i class="icon-gravatar {cls}"'
967 965 ' style="font-size: {size}px;background-size: {size}px;background-image: url(\'{src}\')"'
968 966 '></i>').format(cls=cls, size=size, src=src)
969 967
970 968 else:
971 969 # if src is empty then there was no gravatar, so we use a font icon
972 970 html = ("""<i class="icon-user {cls}" style="font-size: {size}px;"></i>"""
973 971 .format(cls=cls, size=size))
974 972
975 973 return literal(html)
976 974
977 975
978 976 def gravatar_url(email_address, size=30, default=''):
979 977 from tg import tmpl_context as c
980 978
981 979 if not c.visual.use_gravatar:
982 980 return ""
983 981
984 982 _def = 'anonymous@kallithea-scm.org' # default gravatar
985 983 email_address = email_address or _def
986 984
987 985 if email_address == _def:
988 986 return default
989 987
990 988 # re-import url so tests can mock it
991 989 from kallithea.config.routing import url
992 990 from kallithea.model.db import User
993 991
994 992 parsed_url = urllib.parse.urlparse(url.current(qualified=True))
995 993 url = (c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL) \
996 994 .replace('{email}', email_address) \
997 995 .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \
998 996 .replace('{netloc}', parsed_url.netloc) \
999 997 .replace('{scheme}', parsed_url.scheme) \
1000 998 .replace('{size}', str(size))
1001 999 return url
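# Editor's standalone sketch (assumed gravatar-style template; not part of this
# changeset) of the placeholder expansion performed above:
def _expand_gravatar_template_sketch(template, email, size, scheme='https', netloc='example.com'):
    import hashlib  # local import keeps the sketch self-contained
    md5email = hashlib.md5(email.lower().encode('utf-8')).hexdigest()
    return (template
            .replace('{email}', email)
            .replace('{md5email}', md5email)
            .replace('{netloc}', netloc)
            .replace('{scheme}', scheme)
            .replace('{size}', str(size)))
# _expand_gravatar_template_sketch(
#     'https://www.gravatar.com/avatar/{md5email}?d=identicon&s={size}',
#     'user@example.com', 60)
# -> 'https://www.gravatar.com/avatar/<md5 of the address>?d=identicon&s=60'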
1002 1000
1003 1001
1004 1002 def changed_tooltip(nodes):
1005 1003 """
1006 1004 Generates an HTML string for the changed nodes on the changeset page.
1007 1005 It limits the output to 30 entries.
1008 1006
1009 1007 :param nodes: LazyNodesGenerator
1010 1008 """
1011 1009 if nodes:
1012 1010 pref = ': <br/> '
1013 1011 suf = ''
1014 1012 if len(nodes) > 30:
1015 1013 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
1016 1014 return literal(pref + '<br/> '.join([x.path
1017 1015 for x in nodes[:30]]) + suf)
1018 1016 else:
1019 1017 return ': ' + _('No files')
1020 1018
1021 1019
1022 1020 def fancy_file_stats(stats):
1023 1021 """
1024 1022 Displays a fancy two-colored bar for the number of added/deleted
1025 1023 lines of code in a file
1026 1024
1027 1025 :param stats: two element list of added/deleted lines of code
1028 1026 """
1029 1027 from kallithea.lib.diffs import BIN_FILENODE, CHMOD_FILENODE, DEL_FILENODE, MOD_FILENODE, NEW_FILENODE, RENAMED_FILENODE
1030 1028
1031 1029 a, d = stats['added'], stats['deleted']
1032 1030 width = 100
1033 1031
1034 1032 if stats['binary']:
1035 1033 # binary mode
1036 1034 lbl = ''
1037 1035 bin_op = 1
1038 1036
1039 1037 if BIN_FILENODE in stats['ops']:
1040 1038 lbl = 'bin+'
1041 1039
1042 1040 if NEW_FILENODE in stats['ops']:
1043 1041 lbl += _('new file')
1044 1042 bin_op = NEW_FILENODE
1045 1043 elif MOD_FILENODE in stats['ops']:
1046 1044 lbl += _('mod')
1047 1045 bin_op = MOD_FILENODE
1048 1046 elif DEL_FILENODE in stats['ops']:
1049 1047 lbl += _('del')
1050 1048 bin_op = DEL_FILENODE
1051 1049 elif RENAMED_FILENODE in stats['ops']:
1052 1050 lbl += _('rename')
1053 1051 bin_op = RENAMED_FILENODE
1054 1052
1055 1053 # chmod can go with other operations
1056 1054 if CHMOD_FILENODE in stats['ops']:
1057 1055 _org_lbl = _('chmod')
1058 1056 lbl += _org_lbl if lbl.endswith('+') else '+%s' % _org_lbl
1059 1057
1061 1059 b_d = '<div class="bin bin%s progress-bar" style="width:100%%">%s</div>' % (bin_op, lbl)
1062 1060 b_a = '<div class="bin bin1" style="width:0%"></div>'
1063 1061 return literal('<div style="width:%spx" class="progress">%s%s</div>' % (width, b_a, b_d))
1064 1062
1065 1063 t = stats['added'] + stats['deleted']
1066 1064 unit = float(width) / (t or 1)
1067 1065
1068 1066 # needs > 9% of width to be visible or 0 to be hidden
1069 1067 a_p = max(9, unit * a) if a > 0 else 0
1070 1068 d_p = max(9, unit * d) if d > 0 else 0
1071 1069 p_sum = a_p + d_p
1072 1070
1073 1071 if p_sum > width:
1074 1072 # adjust the percentage to be == 100% since we adjusted to 9
1075 1073 if a_p > d_p:
1076 1074 a_p = a_p - (p_sum - width)
1077 1075 else:
1078 1076 d_p = d_p - (p_sum - width)
1079 1077
1080 1078 a_v = a if a > 0 else ''
1081 1079 d_v = d if d > 0 else ''
1082 1080
1083 1081 d_a = '<div class="added progress-bar" style="width:%s%%">%s</div>' % (
1084 1082 a_p, a_v
1085 1083 )
1086 1084 d_d = '<div class="deleted progress-bar" style="width:%s%%">%s</div>' % (
1087 1085 d_p, d_v
1088 1086 )
1089 1087 return literal('<div class="progress" style="width:%spx">%s%s</div>' % (width, d_a, d_d))
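# Editor's worked example (hypothetical numbers, not part of this changeset) of the
# width calculation above: each raw pixel share gets a 9px minimum so tiny changes
# stay visible, and any overflow is then subtracted from the larger bar.
def _bar_widths_sketch(added, deleted, width=100):
    unit = float(width) / ((added + deleted) or 1)
    a_p = max(9, unit * added) if added > 0 else 0
    d_p = max(9, unit * deleted) if deleted > 0 else 0
    overflow = (a_p + d_p) - width
    if overflow > 0:
        if a_p > d_p:
            a_p -= overflow
        else:
            d_p -= overflow
    return a_p, d_p
# _bar_widths_sketch(1, 99) -> (9, 91.0): a single added line still gets a visible 9px bar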
1090 1088
1091 1089
1092 1090 _URLIFY_RE = re.compile(r'''
1093 1091 # URL markup
1094 1092 (?P<url>%s) |
1095 1093 # @mention markup
1096 1094 (?P<mention>%s) |
1097 1095 # Changeset hash markup
1098 1096 (?<!\w|[-_])
1099 1097 (?P<hash>[0-9a-f]{12,40})
1100 1098 (?!\w|[-_]) |
1101 1099 # Markup of *bold text*
1102 1100 (?:
1103 1101 (?:^|(?<=\s))
1104 1102 (?P<bold> [*] (?!\s) [^*\n]* (?<!\s) [*] )
1105 1103 (?![*\w])
1106 1104 ) |
1107 1105 # "Stylize" markup
1108 1106 \[see\ \=&gt;\ *(?P<seen>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
1109 1107 \[license\ \=&gt;\ *(?P<license>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
1110 1108 \[(?P<tagtype>requires|recommends|conflicts|base)\ \=&gt;\ *(?P<tagvalue>[a-zA-Z0-9\-\/]*)\] |
1111 1109 \[(?:lang|language)\ \=&gt;\ *(?P<lang>[a-zA-Z\-\/\#\+]*)\] |
1112 1110 \[(?P<tag>[a-z]+)\]
1113 1111 ''' % (url_re.pattern, MENTIONS_REGEX.pattern),
1114 1112 re.VERBOSE | re.MULTILINE | re.IGNORECASE)
1115 1113
1116 1114
1117 1115 def urlify_text(s, repo_name=None, link_=None, truncate=None, stylize=False, truncatef=truncate):
1118 1116 """
1119 1117 Parses the given text message and makes a literal html string with markup.
1120 1118 The text will be truncated to the specified length.
1121 1119 Hashes are turned into changeset links to the specified repository.
1122 1120 URLs are turned into links to themselves.
1123 1121 Issue references are linked to the configured issue server.
1124 1122 If link_ is provided, all text not already linking somewhere will link there.
1125 1123 >>> urlify_text("Urlify http://example.com/ and 'https://example.com' *and* <b>markup/b>")
1126 1124 literal('Urlify <a href="http://example.com/">http://example.com/</a> and &#39;<a href="https://example.com&apos">https://example.com&apos</a>; <b>*and*</b> &lt;b&gt;markup/b&gt;')
1127 1125 """
1128 1126
1129 1127 def _replace(match_obj):
1130 1128 url = match_obj.group('url')
1131 1129 if url is not None:
1132 1130 return '<a href="%(url)s">%(url)s</a>' % {'url': url}
1133 1131 mention = match_obj.group('mention')
1134 1132 if mention is not None:
1135 1133 return '<b>%s</b>' % mention
1136 1134 hash_ = match_obj.group('hash')
1137 1135 if hash_ is not None and repo_name is not None:
1138 1136 from kallithea.config.routing import url # doh, we need to re-import url to mock it later
1139 1137 return '<a class="changeset_hash" href="%(url)s">%(hash)s</a>' % {
1140 1138 'url': url('changeset_home', repo_name=repo_name, revision=hash_),
1141 1139 'hash': hash_,
1142 1140 }
1143 1141 bold = match_obj.group('bold')
1144 1142 if bold is not None:
1145 1143 return '<b>*%s*</b>' % _urlify(bold[1:-1])
1146 1144 if stylize:
1147 1145 seen = match_obj.group('seen')
1148 1146 if seen:
1149 1147 return '<div class="label label-meta" data-tag="see">see =&gt; %s</div>' % seen
1150 1148 license = match_obj.group('license')
1151 1149 if license:
1152 1150 return '<div class="label label-meta" data-tag="license"><a href="http://www.opensource.org/licenses/%s">%s</a></div>' % (license, license)
1153 1151 tagtype = match_obj.group('tagtype')
1154 1152 if tagtype:
1155 1153 tagvalue = match_obj.group('tagvalue')
1156 1154 return '<div class="label label-meta" data-tag="%s">%s =&gt; <a href="/%s">%s</a></div>' % (tagtype, tagtype, tagvalue, tagvalue)
1157 1155 lang = match_obj.group('lang')
1158 1156 if lang:
1159 1157 return '<div class="label label-meta" data-tag="lang">%s</div>' % lang
1160 1158 tag = match_obj.group('tag')
1161 1159 if tag:
1162 1160 return '<div class="label label-meta" data-tag="%s">%s</div>' % (tag, tag)
1163 1161 return match_obj.group(0)
1164 1162
1165 1163 def _urlify(s):
1166 1164 """
1167 1165 Extract urls from text and make html links out of them
1168 1166 """
1169 1167 return _URLIFY_RE.sub(_replace, s)
1170 1168
1171 1169 if truncate is None:
1172 1170 s = s.rstrip()
1173 1171 else:
1174 1172 s = truncatef(s, truncate, whole_word=True)
1175 1173 s = html_escape(s)
1176 1174 s = _urlify(s)
1177 1175 if repo_name is not None:
1178 1176 s = urlify_issues(s, repo_name)
1179 1177 if link_ is not None:
1180 1178 # make href around everything that isn't a href already
1181 1179 s = linkify_others(s, link_)
1182 1180 s = s.replace('\r\n', '<br/>').replace('\n', '<br/>')
1183 1181 # Turn HTML5 into more valid HTML4 as required by some mail readers.
1184 1182 # (This is not done in one step in html_escape, because character codes like
1185 1183 # &#123; risk to be seen as an issue reference due to the presence of '#'.)
1186 1184 s = s.replace("&apos;", "&#39;")
1187 1185 return literal(s)
1188 1186
1189 1187
1190 1188 def linkify_others(t, l):
1191 1189 """Add a default link to html with links.
1192 1190 HTML doesn't allow nesting of links, so the outer link must be broken up
1193 1191 into pieces that leave room for the links that are already there.
1194 1192 """
1195 1193 urls = re.compile(r'(\<a.*?\<\/a\>)',)
1196 1194 links = []
1197 1195 for e in urls.split(t):
1198 1196 if e.strip() and not urls.match(e):
1199 1197 links.append('<a class="message-link" href="%s">%s</a>' % (l, e))
1200 1198 else:
1201 1199 links.append(e)
1202 1200
1203 1201 return ''.join(links)
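# Editor's illustration (hypothetical input, not part of this changeset) of the
# splitting above: existing <a> elements are passed through untouched, and only the
# surrounding plain-text pieces are wrapped in the default link.
#
#   linkify_others('fixed <a href="/c/abc">abc</a> today', '/journal') ->
#     '<a class="message-link" href="/journal">fixed </a>'
#     '<a href="/c/abc">abc</a>'
#     '<a class="message-link" href="/journal"> today</a>'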
1204 1202
1205 1203
1206 1204 # Global variable that will hold the actual urlify_issues function body.
1207 1205 # Will be set on first use when the global configuration has been read.
1208 1206 _urlify_issues_f = None
1209 1207
1210 1208
1211 1209 def urlify_issues(newtext, repo_name):
1212 1210 """Urlify issue references according to .ini configuration"""
1213 1211 global _urlify_issues_f
1214 1212 if _urlify_issues_f is None:
1215 from kallithea import CONFIG
1216 1213 from kallithea.model.db import URL_SEP
1217 assert CONFIG['sqlalchemy.url'] # make sure config has been loaded
1214 assert kallithea.CONFIG['sqlalchemy.url'] # make sure config has been loaded
1218 1215
1219 1216 # Build chain of urlify functions, starting with not doing any transformation
1220 1217 def tmp_urlify_issues_f(s):
1221 1218 return s
1222 1219
1223 1220 issue_pat_re = re.compile(r'issue_pat(.*)')
1224 for k in CONFIG:
1221 for k in kallithea.CONFIG:
1225 1222 # Find all issue_pat* settings that also have corresponding server_link and prefix configuration
1226 1223 m = issue_pat_re.match(k)
1227 1224 if m is None:
1228 1225 continue
1229 1226 suffix = m.group(1)
1230 issue_pat = CONFIG.get(k)
1231 issue_server_link = CONFIG.get('issue_server_link%s' % suffix)
1232 issue_sub = CONFIG.get('issue_sub%s' % suffix)
1233 issue_prefix = CONFIG.get('issue_prefix%s' % suffix)
1227 issue_pat = kallithea.CONFIG.get(k)
1228 issue_server_link = kallithea.CONFIG.get('issue_server_link%s' % suffix)
1229 issue_sub = kallithea.CONFIG.get('issue_sub%s' % suffix)
1230 issue_prefix = kallithea.CONFIG.get('issue_prefix%s' % suffix)
1234 1231 if issue_prefix:
1235 1232 log.error('found unsupported issue_prefix%s = %r - use issue_sub%s instead', suffix, issue_prefix, suffix)
1236 1233 if not issue_pat:
1237 1234 log.error('skipping incomplete issue pattern %r: it needs a regexp', k)
1238 1235 continue
1239 1236 if not issue_server_link:
1240 1237 log.error('skipping incomplete issue pattern %r: it needs issue_server_link%s', k, suffix)
1241 1238 continue
1242 1239 if issue_sub is None: # issue_sub can be empty but should be present
1243 1240 log.error('skipping incomplete issue pattern %r: it needs (a potentially empty) issue_sub%s', k, suffix)
1244 1241 continue
1245 1242
1246 1243 # Wrap tmp_urlify_issues_f with substitution of this pattern, while making sure all loop variables (and compiled regexps) are bound
1247 1244 try:
1248 1245 issue_re = re.compile(issue_pat)
1249 1246 except re.error as e:
1250 1247 log.error('skipping invalid issue pattern %r: %r -> %r %r. Error: %s', k, issue_pat, issue_server_link, issue_sub, str(e))
1251 1248 continue
1252 1249
1253 1250 log.debug('issue pattern %r: %r -> %r %r', k, issue_pat, issue_server_link, issue_sub)
1254 1251
1255 1252 def issues_replace(match_obj,
1256 1253 issue_server_link=issue_server_link, issue_sub=issue_sub):
1257 1254 try:
1258 1255 issue_url = match_obj.expand(issue_server_link)
1259 1256 except (IndexError, re.error) as e:
1260 1257 log.error('invalid issue_url setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
1261 1258 issue_url = issue_server_link
1262 1259 issue_url = issue_url.replace('{repo}', repo_name)
1263 1260 issue_url = issue_url.replace('{repo_name}', repo_name.split(URL_SEP)[-1])
1264 1261 # if issue_sub is empty use the matched issue reference verbatim
1265 1262 if not issue_sub:
1266 1263 issue_text = match_obj.group()
1267 1264 else:
1268 1265 try:
1269 1266 issue_text = match_obj.expand(issue_sub)
1270 1267 except (IndexError, re.error) as e:
1271 1268 log.error('invalid issue_sub setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
1272 1269 issue_text = match_obj.group()
1273 1270
1274 1271 return (
1275 1272 '<a class="issue-tracker-link" href="%(url)s">'
1276 1273 '%(text)s'
1277 1274 '</a>'
1278 1275 ) % {
1279 1276 'url': issue_url,
1280 1277 'text': issue_text,
1281 1278 }
1282 1279
1283 1280 def tmp_urlify_issues_f(s, issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f):
1284 1281 return issue_re.sub(issues_replace, chain_f(s))
1285 1282
1286 1283 # Set tmp function globally - atomically
1287 1284 _urlify_issues_f = tmp_urlify_issues_f
1288 1285
1289 1286 return _urlify_issues_f(newtext)
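# Editor's sketch of a hypothetical .ini configuration consumed by the loop above
# (the values and URLs are examples, not Kallithea defaults):
#
#   issue_pat = #(\d+)
#   issue_server_link = https://issues.example.com/{repo}/issue/\1
#   issue_sub =
#
# With that configuration, urlify_issues('fixes #42', 'myrepo') would return
# 'fixes <a class="issue-tracker-link" href="https://issues.example.com/myrepo/issue/42">#42</a>'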
1290 1287
1291 1288
1292 1289 def render_w_mentions(source, repo_name=None):
1293 1290 """
1294 1291 Render plain text with revision hashes and issue references urlified
1295 1292 and with @mention highlighting.
1296 1293 """
1297 1294 s = safe_str(source)
1298 1295 s = urlify_text(s, repo_name=repo_name)
1299 1296 return literal('<div class="formatted-fixed">%s</div>' % s)
1300 1297
1301 1298
1302 1299 def short_ref(ref_type, ref_name):
1303 1300 if ref_type == 'rev':
1304 1301 return short_id(ref_name)
1305 1302 return ref_name
1306 1303
1307 1304
1308 1305 def link_to_ref(repo_name, ref_type, ref_name, rev=None):
1309 1306 """
1310 1307 Return full markup for a link to changeset_home for a changeset.
1311 1308 If ref_type is 'branch' it will link to the changelog instead.
1312 1309 ref_name is shortened if ref_type is 'rev'.
1313 1310 If rev is specified, show it too, explicitly linking to that revision.
1314 1311 """
1315 1312 txt = short_ref(ref_type, ref_name)
1316 1313 if ref_type == 'branch':
1317 1314 u = url('changelog_home', repo_name=repo_name, branch=ref_name)
1318 1315 else:
1319 1316 u = url('changeset_home', repo_name=repo_name, revision=ref_name)
1320 1317 l = link_to(repo_name + '#' + txt, u)
1321 1318 if rev and ref_type != 'rev':
1322 1319 l = literal('%s (%s)' % (l, link_to(short_id(rev), url('changeset_home', repo_name=repo_name, revision=rev))))
1323 1320 return l
1324 1321
1325 1322
1326 1323 def changeset_status(repo, revision):
1327 1324 from kallithea.model.changeset_status import ChangesetStatusModel
1328 1325 return ChangesetStatusModel().get_status(repo, revision)
1329 1326
1330 1327
1331 1328 def changeset_status_lbl(changeset_status):
1332 1329 from kallithea.model.db import ChangesetStatus
1333 1330 return ChangesetStatus.get_status_lbl(changeset_status)
1334 1331
1335 1332
1336 1333 def get_permission_name(key):
1337 1334 from kallithea.model.db import Permission
1338 1335 return dict(Permission.PERMS).get(key)
1339 1336
1340 1337
1341 1338 def journal_filter_help():
1342 1339 return _(textwrap.dedent('''
1343 1340 Example filter terms:
1344 1341 repository:vcs
1345 1342 username:developer
1346 1343 action:*push*
1347 1344 ip:127.0.0.1
1348 1345 date:20120101
1349 1346 date:[20120101100000 TO 20120102]
1350 1347
1351 1348 Generate wildcards using '*' character:
1352 1349 "repository:vcs*" - search everything starting with 'vcs'
1353 1350 "repository:*vcs*" - search for repository containing 'vcs'
1354 1351
1355 1352 Optional AND / OR operators in queries
1356 1353 "repository:vcs OR repository:test"
1357 1354 "username:test AND repository:test*"
1358 1355 '''))
1359 1356
1360 1357
1361 1358 def not_mapped_error(repo_name):
1362 1359 flash(_('%s repository is not mapped to the database; perhaps'
1363 1360 ' it was created or renamed on the filesystem.'
1364 1361 ' Please run the application again'
1365 1362 ' in order to rescan repositories') % repo_name, category='error')
1366 1363
1367 1364
1368 1365 def ip_range(ip_addr):
1369 1366 from kallithea.model.db import UserIpMap
1370 1367 s, e = UserIpMap._get_ip_range(ip_addr)
1371 1368 return '%s - %s' % (s, e)
1372 1369
1373 1370
1374 1371 session_csrf_secret_name = "_session_csrf_secret_token"
1375 1372
1376 1373 def session_csrf_secret_token():
1377 1374 """Return (and create) the current session's CSRF protection token."""
1378 1375 from tg import session
1379 1376 if session_csrf_secret_name not in session:
1380 1377 session[session_csrf_secret_name] = str(random.getrandbits(128))
1381 1378 session.save()
1382 1379 return session[session_csrf_secret_name]
1383 1380
1384 1381 def form(url, method="post", **attrs):
1385 1382 """Like webhelpers.html.tags.form , but automatically adding
1386 1383 session_csrf_secret_token for POST. The secret is thus never leaked in GET
1387 1384 URLs.
1388 1385 """
1389 1386 form = insecure_form(url, method, **attrs)
1390 1387 if method.lower() == 'get':
1391 1388 return form
1392 1389 return form + HTML.div(hidden(session_csrf_secret_name, session_csrf_secret_token()), style="display: none;")
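# Editor's usage sketch (hypothetical route and rendered action, not part of this
# changeset): for POST the helper appends a hidden CSRF field, while GET forms are
# returned unchanged so the token never appears in query strings or request logs.
#
#   form(url('admin_settings'), method='post')
#
# renders roughly:
#
#   <form action="/_admin/settings" method="post">
#   <div style="display: none;"><input name="_session_csrf_secret_token" type="hidden" value="..."/></div>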
@@ -1,188 +1,188 b''
1 1 import mock
2 2
3 from kallithea import CONFIG
3 import kallithea
4 4 from kallithea.config.conf import INDEX_FILENAMES
5 5 from kallithea.model.meta import Session
6 6 from kallithea.model.repo import RepoModel
7 7 from kallithea.model.repo_group import RepoGroupModel
8 8 from kallithea.tests import base
9 9 from kallithea.tests.fixture import Fixture, create_test_index
10 10
11 11
12 12 fixture = Fixture()
13 13
14 14
15 15 def init_indexing_test(repo):
16 16 prev = fixture.commit_change(repo.repo_name,
17 17 filename='this_should_be_unique_filename.txt',
18 18 content='this_should_be_unique_content\n',
19 19 message='this_should_be_unique_commit_log',
20 20 vcs_type='hg',
21 21 newfile=True)
22 22
23 23 def init_stopword_test(repo):
24 24 prev = fixture.commit_change(repo.repo_name,
25 25 filename='this/is/it',
26 26 content='def test\n',
27 27 message='bother to ask where - in folder',
28 28 vcs_type='hg',
29 29 newfile=True)
30 30 prev = fixture.commit_change(repo.repo_name,
31 31 filename='join.us',
32 32 content='def test\n',
33 33 message='bother to ask where - top level',
34 34 author='this is it <this-is-it@foo.bar.com>',
35 35 vcs_type='hg',
36 36 parent=prev,
37 37 newfile=True)
38 38
39 39
40 40 repos = [
41 41 # reponame, init func or fork base, groupname
42 42 ('indexing_test', init_indexing_test, None),
43 43 ('indexing_test-fork', 'indexing_test', None),
44 44 ('group/indexing_test', 'indexing_test', 'group'),
45 45 ('this-is-it', 'indexing_test', None),
46 46 ('indexing_test-foo', 'indexing_test', None),
47 47 ('stopword_test', init_stopword_test, None),
48 48 ]
49 49
50 50
51 51 # map: name => id
52 52 repoids = {}
53 53 groupids = {}
54 54
55 55
56 56 def rebuild_index(full_index):
57 57 with mock.patch('kallithea.lib.indexers.daemon.log.debug',
58 58 lambda *args, **kwargs: None):
59 59 # The more revisions managed repositories have, the more
60 60 # memory capturing "log.debug()" output in "indexers.daemon"
61 61 # requires. This may cause unintentional failure of subsequent
62 62 # tests, if ENOMEM at forking "git" prevents rebuilding the
63 63 # index for search.
64 64 # Therefore, "log.debug()" is disabled regardless of logging
65 65 # level while rebuilding index.
66 66 # (FYI, ENOMEM occurs at forking "git" with python 2.7.3,
67 67 # Linux 3.2.78-1 x86_64, 3GB memory, and no ulimit
68 68 # configuration for memory)
69 create_test_index(base.TESTS_TMP_PATH, CONFIG, full_index=full_index)
69 create_test_index(base.TESTS_TMP_PATH, kallithea.CONFIG, full_index=full_index)
70 70
71 71
72 72 class TestSearchControllerIndexing(base.TestController):
73 73 @classmethod
74 74 def setup_class(cls):
75 75 for reponame, init_or_fork, groupname in repos:
76 76 if groupname and groupname not in groupids:
77 77 group = fixture.create_repo_group(groupname)
78 78 groupids[groupname] = group.group_id
79 79 if callable(init_or_fork):
80 80 repo = fixture.create_repo(reponame,
81 81 repo_group=groupname)
82 82 init_or_fork(repo)
83 83 else:
84 84 repo = fixture.create_fork(init_or_fork, reponame,
85 85 repo_group=groupname)
86 86 repoids[reponame] = repo.repo_id
87 87
88 88 # treat "it" as indexable filename
89 89 filenames_mock = list(INDEX_FILENAMES)
90 90 filenames_mock.append('it')
91 91 with mock.patch('kallithea.lib.indexers.daemon.INDEX_FILENAMES',
92 92 filenames_mock):
93 93 rebuild_index(full_index=False) # only for newly added repos
94 94
95 95 @classmethod
96 96 def teardown_class(cls):
97 97 # delete in reversed order, to delete fork destination at first
98 98 for reponame, init_or_fork, groupname in reversed(repos):
99 99 RepoModel().delete(repoids[reponame])
100 100
101 101 for reponame, init_or_fork, groupname in reversed(repos):
102 102 if groupname in groupids:
103 103 RepoGroupModel().delete(groupids.pop(groupname),
104 104 force_delete=True)
105 105
106 106 Session().commit()
107 107 Session.remove()
108 108
109 109 rebuild_index(full_index=True) # rebuild fully for subsequent tests
110 110
111 111 @base.parametrize('reponame', [
112 112 ('indexing_test'),
113 113 ('indexing_test-fork'),
114 114 ('group/indexing_test'),
115 115 ('this-is-it'),
116 116 ('*-fork'),
117 117 ('group/*'),
118 118 ])
119 119 @base.parametrize('searchtype,query,hit', [
120 120 ('content', 'this_should_be_unique_content', 1),
121 121 ('commit', 'this_should_be_unique_commit_log', 1),
122 122 ('path', 'this_should_be_unique_filename.txt', 1),
123 123 ])
124 124 def test_repository_tokenization(self, reponame, searchtype, query, hit):
125 125 self.log_user()
126 126
127 127 q = 'repository:%s %s' % (reponame, query)
128 128 response = self.app.get(base.url(controller='search', action='index'),
129 129 {'q': q, 'type': searchtype})
130 130 response.mustcontain('>%d results' % hit)
131 131
132 132 @base.parametrize('reponame', [
133 133 ('indexing_test'),
134 134 ('indexing_test-fork'),
135 135 ('group/indexing_test'),
136 136 ('this-is-it'),
137 137 ])
138 138 @base.parametrize('searchtype,query,hit', [
139 139 ('content', 'this_should_be_unique_content', 1),
140 140 ('commit', 'this_should_be_unique_commit_log', 1),
141 141 ('path', 'this_should_be_unique_filename.txt', 1),
142 142 ])
143 143 def test_searching_under_repository(self, reponame, searchtype, query, hit):
144 144 self.log_user()
145 145
146 146 response = self.app.get(base.url(controller='search', action='index',
147 147 repo_name=reponame),
148 148 {'q': query, 'type': searchtype})
149 149 response.mustcontain('>%d results' % hit)
150 150
151 151 @base.parametrize('searchtype,query,hit', [
152 152 ('content', 'path:this/is/it def test', 1),
153 153 ('commit', 'added:this/is/it bother to ask where', 1),
154 154 # this condition matches against files below, because
155 155 # "path:" condition is also applied on "repository path".
156 156 # - "this/is/it" in "stopword_test" repo
157 157 # - "this_should_be_unique_filename.txt" in "this-is-it" repo
158 158 ('path', 'this/is/it', 2),
159 159
160 160 ('content', 'extension:us', 1),
161 161 ('path', 'extension:us', 1),
162 162 ])
163 163 def test_filename_stopword(self, searchtype, query, hit):
164 164 response = self.app.get(base.url(controller='search', action='index'),
165 165 {'q': query, 'type': searchtype})
166 166
167 167 response.mustcontain('>%d results' % hit)
168 168
169 169 @base.parametrize('searchtype,query,hit', [
170 170 # matching against both 2 files
171 171 ('content', 'owner:"this is it"', 0),
172 172 ('content', 'owner:this-is-it', 0),
173 173 ('path', 'owner:"this is it"', 0),
174 174 ('path', 'owner:this-is-it', 0),
175 175
176 176 # matching against both 2 revisions
177 177 ('commit', 'owner:"this is it"', 0),
178 178 ('commit', 'owner:"this-is-it"', 0),
179 179
180 180 # matching against only 1 revision
181 181 ('commit', 'author:"this is it"', 1),
182 182 ('commit', 'author:"this-is-it"', 1),
183 183 ])
184 184 def test_mailaddr_stopword(self, searchtype, query, hit):
185 185 response = self.app.get(base.url(controller='search', action='index'),
186 186 {'q': query, 'type': searchtype})
187 187
188 188 response.mustcontain('>%d results' % hit)
@@ -1,642 +1,642 b''
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 Test suite for vcs push/pull operations.
16 16
17 17 The tests need Git > 1.8.1.
18 18
19 19 This file was forked by the Kallithea project in July 2014.
20 20 Original author and date, and relevant copyright and licensing information is below:
21 21 :created_on: Dec 30, 2010
22 22 :author: marcink
23 23 :copyright: (c) 2013 RhodeCode GmbH, and others.
24 24 :license: GPLv3, see LICENSE.md for more details.
25 25
26 26 """
27 27
28 28 import json
29 29 import os
30 30 import re
31 31 import tempfile
32 32 import time
33 33 import urllib.request
34 34 from subprocess import PIPE, Popen
35 35 from tempfile import _RandomNameSequence
36 36
37 37 import pytest
38 38
39 from kallithea import CONFIG
39 import kallithea
40 40 from kallithea.lib.utils2 import ascii_bytes, safe_str
41 41 from kallithea.model.db import Repository, Ui, User, UserIpMap, UserLog
42 42 from kallithea.model.meta import Session
43 43 from kallithea.model.ssh_key import SshKeyModel
44 44 from kallithea.model.user import UserModel
45 45 from kallithea.tests import base
46 46 from kallithea.tests.fixture import Fixture
47 47
48 48
49 49 DEBUG = True
50 50 HOST = '127.0.0.1:4999' # test host
51 51
52 52 fixture = Fixture()
53 53
54 54
55 55 # Parameterize different kinds of VCS testing - both the kind of VCS and the
56 56 # access method (HTTP/SSH)
57 57
58 58 # Mixin for using HTTP and SSH URLs
59 59 class HttpVcsTest(object):
60 60 @staticmethod
61 61 def repo_url_param(webserver, repo_name, **kwargs):
62 62 return webserver.repo_url(repo_name, **kwargs)
63 63
64 64 class SshVcsTest(object):
65 65 public_keys = {
66 66 base.TEST_USER_REGULAR_LOGIN: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== kallithea@localhost',
67 67 base.TEST_USER_ADMIN_LOGIN: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUq== kallithea@localhost',
68 68 }
69 69
70 70 @classmethod
71 71 def repo_url_param(cls, webserver, repo_name, username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS, client_ip=base.IP_ADDR):
72 72 user = User.get_by_username(username)
73 73 if user.ssh_keys:
74 74 ssh_key = user.ssh_keys[0]
75 75 else:
76 76 sshkeymodel = SshKeyModel()
77 77 ssh_key = sshkeymodel.create(user, 'test key', cls.public_keys[user.username])
78 78 Session().commit()
79 79
80 80 return cls._ssh_param(repo_name, user, ssh_key, client_ip)
81 81
82 82 # Mixins for using Mercurial and Git
83 83 class HgVcsTest(object):
84 84 repo_type = 'hg'
85 85 repo_name = base.HG_REPO
86 86
87 87 class GitVcsTest(object):
88 88 repo_type = 'git'
89 89 repo_name = base.GIT_REPO
90 90
91 91 # Combine mixins to give the combinations we want to parameterize tests with
92 92 class HgHttpVcsTest(HgVcsTest, HttpVcsTest):
93 93 pass
94 94
95 95 class GitHttpVcsTest(GitVcsTest, HttpVcsTest):
96 96 pass
97 97
98 98 class HgSshVcsTest(HgVcsTest, SshVcsTest):
99 99 @staticmethod
100 100 def _ssh_param(repo_name, user, ssh_key, client_ip):
101 101 # Specify a custom ssh command on the command line
102 102 return r"""--config ui.ssh="bash -c 'SSH_ORIGINAL_COMMAND=\"\$2\" SSH_CONNECTION=\"%s 1024 127.0.0.1 22\" kallithea-cli ssh-serve -c %s %s %s' --" ssh://someuser@somehost/%s""" % (
103 103 client_ip,
104 CONFIG['__file__'],
104 kallithea.CONFIG['__file__'],
105 105 user.user_id,
106 106 ssh_key.user_ssh_key_id,
107 107 repo_name)
108 108
109 109 class GitSshVcsTest(GitVcsTest, SshVcsTest):
110 110 @staticmethod
111 111 def _ssh_param(repo_name, user, ssh_key, client_ip):
112 112 # Set a custom ssh command in the global environment
113 113 os.environ['GIT_SSH_COMMAND'] = r"""bash -c 'SSH_ORIGINAL_COMMAND="$2" SSH_CONNECTION="%s 1024 127.0.0.1 22" kallithea-cli ssh-serve -c %s %s %s' --""" % (
114 114 client_ip,
115 CONFIG['__file__'],
115 kallithea.CONFIG['__file__'],
116 116 user.user_id,
117 117 ssh_key.user_ssh_key_id)
118 118 return "ssh://someuser@somehost/%s""" % repo_name
119 119
120 120 parametrize_vcs_test = base.parametrize('vt', [
121 121 HgHttpVcsTest,
122 122 GitHttpVcsTest,
123 123 HgSshVcsTest,
124 124 GitSshVcsTest,
125 125 ])
126 126 parametrize_vcs_test_hg = base.parametrize('vt', [
127 127 HgHttpVcsTest,
128 128 HgSshVcsTest,
129 129 ])
130 130 parametrize_vcs_test_http = base.parametrize('vt', [
131 131 HgHttpVcsTest,
132 132 GitHttpVcsTest,
133 133 ])
134 134
135 135 class Command(object):
136 136
137 137 def __init__(self, cwd):
138 138 self.cwd = cwd
139 139
140 140 def execute(self, *args, **environ):
141 141 """
142 142 Runs command on the system with given ``args`` using simple space
143 143 join without safe quoting.
144 144 """
145 145 command = ' '.join(args)
146 146 ignoreReturnCode = environ.pop('ignoreReturnCode', False)
147 147 if DEBUG:
148 148 print('*** CMD %s ***' % command)
149 149 testenv = dict(os.environ)
150 150 testenv['LANG'] = 'en_US.UTF-8'
151 151 testenv['LANGUAGE'] = 'en_US:en'
152 152 testenv['HGPLAIN'] = ''
153 153 testenv['HGRCPATH'] = ''
154 154 testenv.update(environ)
155 155 p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd, env=testenv)
156 156 stdout, stderr = p.communicate()
157 157 if DEBUG:
158 158 if stdout:
159 159 print('stdout:', stdout)
160 160 if stderr:
161 161 print('stderr:', stderr)
162 162 if not ignoreReturnCode:
163 163 assert p.returncode == 0
164 164 return safe_str(stdout), safe_str(stderr)
165 165
166 166
167 167 def _get_tmp_dir(prefix='vcs_operations-', suffix=''):
168 168 return tempfile.mkdtemp(dir=base.TESTS_TMP_PATH, prefix=prefix, suffix=suffix)
169 169
170 170
171 171 def _add_files(vcs, dest_dir, files_no=3):
172 172 """
173 173 Generate some files and add them to the dest_dir repo.
174 174 vcs is 'git' or 'hg' and defines which VCS we want to make those files for.
175 175
176 176 :param vcs:
177 177 :param dest_dir:
178 178 """
179 179 added_file = '%ssetup.py' % next(_RandomNameSequence())
180 180 open(os.path.join(dest_dir, added_file), 'a').close()
181 181 Command(dest_dir).execute(vcs, 'add', added_file)
182 182
183 183 email = 'me@example.com'
184 184 if os.name == 'nt':
185 185 author_str = 'User <%s>' % email
186 186 else:
187 187 author_str = 'User ǝɯɐᴎ <%s>' % email
188 188 for i in range(files_no):
189 189 cmd = """echo "added_line%s" >> %s""" % (i, added_file)
190 190 Command(dest_dir).execute(cmd)
191 191 if vcs == 'hg':
192 192 cmd = """hg commit -m "committed new %s" -u "%s" "%s" """ % (
193 193 i, author_str, added_file
194 194 )
195 195 elif vcs == 'git':
196 196 cmd = """git commit -m "committed new %s" --author "%s" "%s" """ % (
197 197 i, author_str, added_file
198 198 )
199 199 # git commit needs EMAIL on some machines
200 200 Command(dest_dir).execute(cmd, EMAIL=email)
201 201
202 202 def _add_files_and_push(webserver, vt, dest_dir, clone_url, ignoreReturnCode=False, files_no=3):
203 203 _add_files(vt.repo_type, dest_dir, files_no=files_no)
204 204 # PUSH it back
205 205 stdout = stderr = None
206 206 if vt.repo_type == 'hg':
207 207 stdout, stderr = Command(dest_dir).execute('hg push -f --verbose', clone_url, ignoreReturnCode=ignoreReturnCode)
208 208 elif vt.repo_type == 'git':
209 209 stdout, stderr = Command(dest_dir).execute('git push -f --verbose', clone_url, "master", ignoreReturnCode=ignoreReturnCode)
210 210
211 211 return stdout, stderr
212 212
213 213
214 214 def _check_outgoing(vcs, cwd, clone_url):
215 215 if vcs == 'hg':
216 216 # hg removes the password from default URLs, so we have to provide it here via the clone_url
217 217 return Command(cwd).execute('hg -q outgoing', clone_url, ignoreReturnCode=True)
218 218 elif vcs == 'git':
219 219 Command(cwd).execute('git remote update')
220 220 return Command(cwd).execute('git log origin/master..master')
221 221
222 222
223 223 def set_anonymous_access(enable=True):
224 224 user = User.get_default_user()
225 225 user.active = enable
226 226 Session().commit()
227 227 if enable != User.get_default_user().active:
228 228 raise Exception('Cannot set anonymous access')
229 229
230 230
231 231 #==============================================================================
232 232 # TESTS
233 233 #==============================================================================
234 234
235 235
236 236 def _check_proper_git_push(stdout, stderr):
237 237 assert 'fatal' not in stderr
238 238 assert 'rejected' not in stderr
239 239 assert 'Pushing to' in stderr
240 240 assert 'master -> master' in stderr
241 241
242 242
243 243 @pytest.mark.usefixtures("test_context_fixture")
244 244 class TestVCSOperations(base.TestController):
245 245
246 246 @classmethod
247 247 def setup_class(cls):
248 248 # DISABLE ANONYMOUS ACCESS
249 249 set_anonymous_access(False)
250 250
251 251 @pytest.fixture()
252 252 def testhook_cleanup(self):
253 253 yield
254 254 # remove hook
255 255 for hook in ['prechangegroup', 'pretxnchangegroup', 'preoutgoing', 'changegroup', 'outgoing', 'incoming']:
256 256 entry = Ui.get_by_key('hooks', '%s.testhook' % hook)
257 257 if entry:
258 258 Session().delete(entry)
259 259 Session().commit()
260 260
261 261 @pytest.fixture(scope="module")
262 262 def testfork(self):
263 263 # create fork so the repo stays untouched
264 264 git_fork_name = '%s_fork%s' % (base.GIT_REPO, next(_RandomNameSequence()))
265 265 fixture.create_fork(base.GIT_REPO, git_fork_name)
266 266 hg_fork_name = '%s_fork%s' % (base.HG_REPO, next(_RandomNameSequence()))
267 267 fixture.create_fork(base.HG_REPO, hg_fork_name)
268 268 return {'git': git_fork_name, 'hg': hg_fork_name}
269 269
270 270 @parametrize_vcs_test
271 271 def test_clone_repo_by_admin(self, webserver, vt):
272 272 clone_url = vt.repo_url_param(webserver, vt.repo_name)
273 273 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir())
274 274
275 275 if vt.repo_type == 'git':
276 276 assert 'Cloning into' in stdout + stderr
277 277 assert stderr == '' or stdout == ''
278 278 elif vt.repo_type == 'hg':
279 279 assert 'requesting all changes' in stdout
280 280 assert 'adding changesets' in stdout
281 281 assert 'adding manifests' in stdout
282 282 assert 'adding file changes' in stdout
283 283 assert stderr == ''
284 284
285 285 @parametrize_vcs_test_http
286 286 def test_clone_wrong_credentials(self, webserver, vt):
287 287 clone_url = vt.repo_url_param(webserver, vt.repo_name, password='bad!')
288 288 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
289 289 if vt.repo_type == 'git':
290 290 assert 'fatal: Authentication failed' in stderr
291 291 elif vt.repo_type == 'hg':
292 292 assert 'abort: authorization failed' in stderr
293 293
294 294 def test_clone_git_dir_as_hg(self, webserver):
295 295 clone_url = HgHttpVcsTest.repo_url_param(webserver, base.GIT_REPO)
296 296 stdout, stderr = Command(base.TESTS_TMP_PATH).execute('hg clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
297 297 assert 'HTTP Error 404: Not Found' in stderr or "not a valid repository" in stdout and 'abort:' in stderr
298 298
299 299 def test_clone_hg_repo_as_git(self, webserver):
300 300 clone_url = GitHttpVcsTest.repo_url_param(webserver, base.HG_REPO)
301 301 stdout, stderr = Command(base.TESTS_TMP_PATH).execute('git clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
302 302 assert 'not found' in stderr
303 303
304 304 @parametrize_vcs_test
305 305 def test_clone_non_existing_path(self, webserver, vt):
306 306 clone_url = vt.repo_url_param(webserver, 'trololo')
307 307 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
308 308 if vt.repo_type == 'git':
309 309 assert 'not found' in stderr or 'abort: Access to %r denied' % 'trololo' in stderr
310 310 elif vt.repo_type == 'hg':
311 311 assert 'HTTP Error 404: Not Found' in stderr or 'abort: no suitable response from remote hg' in stderr and 'remote: abort: Access to %r denied' % 'trololo' in stdout + stderr
312 312
313 313 @parametrize_vcs_test
314 314 def test_push_new_repo(self, webserver, vt):
315 315 # Clear the log so we know what is added
316 316 UserLog.query().delete()
317 317 Session().commit()
318 318
319 319 # Create an empty server repo using the API
320 320 repo_name = 'new_%s_%s' % (vt.repo_type, next(_RandomNameSequence()))
321 321 usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
322 322 params = {
323 323 "id": 7,
324 324 "api_key": usr.api_key,
325 325 "method": 'create_repo',
326 326 "args": dict(repo_name=repo_name,
327 327 owner=base.TEST_USER_ADMIN_LOGIN,
328 328 repo_type=vt.repo_type),
329 329 }
330 330 req = urllib.request.Request(
331 331 'http://%s:%s/_admin/api' % webserver.server_address,
332 332 data=ascii_bytes(json.dumps(params)),
333 333 headers={'content-type': 'application/json'})
334 334 response = urllib.request.urlopen(req)
335 335 result = json.loads(response.read())
336 336 # Expect something like:
337 337 # {u'result': {u'msg': u'Created new repository `new_XXX`', u'task': None, u'success': True}, u'id': 7, u'error': None}
338 338 assert result['result']['success']
339 339
340 340 # Create local clone of the empty server repo
341 341 local_clone_dir = _get_tmp_dir()
342 342 clone_url = vt.repo_url_param(webserver, repo_name)
343 343 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, local_clone_dir)
344 344
345 345 # Make 3 commits and push to the empty server repo.
346 346 # The server repo doesn't have any other heads than the
347 347 # refs/heads/master we are pushing, but the `git log` in the push hook
348 348 # should still list the 3 commits.
349 349 stdout, stderr = _add_files_and_push(webserver, vt, local_clone_dir, clone_url=clone_url)
350 350 if vt.repo_type == 'git':
351 351 _check_proper_git_push(stdout, stderr)
352 352 elif vt.repo_type == 'hg':
353 353 assert 'pushing to ' in stdout
354 354 assert 'remote: added ' in stdout
355 355
356 356 # Verify that we got the right events in UserLog. Expect something like:
357 357 # <UserLog('id:new_git_XXX:started_following_repo')>
358 358 # <UserLog('id:new_git_XXX:user_created_repo')>
359 359 # <UserLog('id:new_git_XXX:pull')>
360 360 # <UserLog('id:new_git_XXX:push:aed9d4c1732a1927da3be42c47eb9afdc200d427,d38b083a07af10a9f44193486959a96a23db78da,4841ff9a2b385bec995f4679ef649adb3f437622')>
361 361 Session.close() # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
362 362 action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
363 363 assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == ([
364 364 ('started_following_repo', 0),
365 365 ('user_created_repo', 0),
366 366 ('pull', 0),
367 367 ('push', 3)]
368 368 if vt.repo_type == 'git' else [
369 369 ('started_following_repo', 0),
370 370 ('user_created_repo', 0),
371 371 # (u'pull', 0), # Mercurial outgoing hook is not called for empty clones
372 372 ('push', 3)])
373 373
374 374 @parametrize_vcs_test
375 375 def test_push_new_file(self, webserver, testfork, vt):
376 376 UserLog.query().delete()
377 377 Session().commit()
378 378
379 379 dest_dir = _get_tmp_dir()
380 380 clone_url = vt.repo_url_param(webserver, vt.repo_name)
381 381 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
382 382
383 383 clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type])
384 384 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, clone_url=clone_url)
385 385
386 386 if vt.repo_type == 'git':
387 387 _check_proper_git_push(stdout, stderr)
388 388 elif vt.repo_type == 'hg':
389 389 assert 'pushing to' in stdout
390 390 assert 'Repository size' in stdout
391 391 assert 'Last revision is now' in stdout
392 392
393 393 Session.close() # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
394 394 action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
395 395 assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \
396 396 [('pull', 0), ('push', 3)]
397 397
398 398 @parametrize_vcs_test
399 399 def test_pull(self, webserver, testfork, vt):
400 400 UserLog.query().delete()
401 401 Session().commit()
402 402
403 403 dest_dir = _get_tmp_dir()
404 404 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'init', dest_dir)
405 405
406 406 clone_url = vt.repo_url_param(webserver, vt.repo_name)
407 407 stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url)
408 408 Session.close() # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
409 409
410 410 if vt.repo_type == 'git':
411 411 assert 'FETCH_HEAD' in stderr
412 412 elif vt.repo_type == 'hg':
413 413 assert 'new changesets' in stdout
414 414
415 415 action_parts = [ul.action for ul in UserLog.query().order_by(UserLog.user_log_id)]
416 416 assert action_parts == ['pull']
417 417
418 418 # Test handling of URLs with extra '/' around repo_name
419 419 stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url.replace('/' + vt.repo_name, '/./%s/' % vt.repo_name), ignoreReturnCode=True)
420 420 if issubclass(vt, HttpVcsTest):
421 421 if vt.repo_type == 'git':
422 422 # NOTE: when pulling from http://hostname/./vcs_test_git/ , the git client will normalize that and issue an HTTP request to /vcs_test_git/info/refs
423 423 assert 'Already up to date.' in stdout
424 424 else:
425 425 assert vt.repo_type == 'hg'
426 426 assert "abort: HTTP Error 404: Not Found" in stderr
427 427 else:
428 428 assert issubclass(vt, SshVcsTest)
429 429 if vt.repo_type == 'git':
430 430 assert "abort: Access to './%s' denied" % vt.repo_name in stderr
431 431 else:
432 432 assert "abort: Access to './%s' denied" % vt.repo_name in stdout + stderr
433 433
434 434 stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url.replace('/' + vt.repo_name, '/%s/' % vt.repo_name), ignoreReturnCode=True)
435 435 if vt.repo_type == 'git':
436 436 assert 'Already up to date.' in stdout
437 437 else:
438 438 assert vt.repo_type == 'hg'
439 439 assert "no changes found" in stdout
440 440 assert "denied" not in stderr
441 441 assert "denied" not in stdout
442 442 assert "404" not in stdout
443 443
444 444 @parametrize_vcs_test
445 445 def test_push_invalidates_cache(self, webserver, testfork, vt):
446 446 pre_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])]
447 447
448 448 dest_dir = _get_tmp_dir()
449 449 clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type])
450 450 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
451 451
452 452 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, files_no=1, clone_url=clone_url)
453 453
454 454 if vt.repo_type == 'git':
455 455 _check_proper_git_push(stdout, stderr)
456 456
457 457 Session.close() # expire session to make sure SA fetches new Repository instances after last_changeset has been updated by server side hook in another process
458 458 post_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])]
459 459 assert pre_cached_tip != post_cached_tip
460 460
461 461 @parametrize_vcs_test_http
462 462 def test_push_wrong_credentials(self, webserver, vt):
463 463 dest_dir = _get_tmp_dir()
464 464 clone_url = vt.repo_url_param(webserver, vt.repo_name)
465 465 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
466 466
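# a push with deliberately wrong credentials must be rejected by the server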
467 467 clone_url = webserver.repo_url(vt.repo_name, username='bad', password='name')
468 468 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir,
469 469 clone_url=clone_url, ignoreReturnCode=True)
470 470
471 471 if vt.repo_type == 'git':
472 472 assert 'fatal: Authentication failed' in stderr
473 473 elif vt.repo_type == 'hg':
474 474 assert 'abort: authorization failed' in stderr
475 475
476 476 @parametrize_vcs_test
477 477 def test_push_with_readonly_credentials(self, webserver, vt):
478 478 UserLog.query().delete()
479 479 Session().commit()
480 480
481 481 dest_dir = _get_tmp_dir()
482 482 clone_url = vt.repo_url_param(webserver, vt.repo_name, username=base.TEST_USER_REGULAR_LOGIN, password=base.TEST_USER_REGULAR_PASS)
483 483 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
484 484
485 485 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, ignoreReturnCode=True, clone_url=clone_url)
486 486
487 487 if vt.repo_type == 'git':
488 488 assert 'The requested URL returned error: 403' in stderr or 'abort: Push access to %r denied' % str(vt.repo_name) in stderr
489 489 elif vt.repo_type == 'hg':
490 490 assert 'abort: HTTP Error 403: Forbidden' in stderr or 'abort: push failed on remote' in stderr and 'remote: Push access to %r denied' % str(vt.repo_name) in stdout
491 491
492 492 Session.close() # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
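# only the initial clone should be journalled as 'pull' - the rejected push must leave no 'push' entry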
493 493 action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
494 494 assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \
495 495 [('pull', 0)]
496 496
497 497 @parametrize_vcs_test
498 498 def test_push_back_to_wrong_url(self, webserver, vt):
499 499 dest_dir = _get_tmp_dir()
500 500 clone_url = vt.repo_url_param(webserver, vt.repo_name)
501 501 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
502 502
503 503 stdout, stderr = _add_files_and_push(
504 504 webserver, vt, dest_dir, clone_url='http://%s:%s/tmp' % (
505 505 webserver.server_address[0], webserver.server_address[1]),
506 506 ignoreReturnCode=True)
507 507
508 508 if vt.repo_type == 'git':
509 509 assert 'not found' in stderr
510 510 elif vt.repo_type == 'hg':
511 511 assert 'HTTP Error 404: Not Found' in stderr
512 512
513 513 @parametrize_vcs_test
514 514 def test_ip_restriction(self, webserver, vt):
515 515 user_model = UserModel()
516 516 try:
517 517 # Add IP constraint that excludes the test context:
518 518 user_model.add_extra_ip(base.TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
519 519 Session().commit()
520 520 # IP permissions are cached, need to wait for the cache in the server process to expire
521 521 time.sleep(1.5)
522 522 clone_url = vt.repo_url_param(webserver, vt.repo_name)
523 523 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
524 524 if vt.repo_type == 'git':
525 525 # The message apparently changed in Git 1.8.3, so match it loosely.
526 526 assert re.search(r'\b403\b', stderr) or 'abort: User test_admin from 127.0.0.127 cannot be authorized' in stderr
527 527 elif vt.repo_type == 'hg':
528 528 assert 'abort: HTTP Error 403: Forbidden' in stderr or 'remote: abort: User test_admin from 127.0.0.127 cannot be authorized' in stdout + stderr
529 529 finally:
530 530 # release IP restrictions
531 531 for ip in UserIpMap.query():
532 532 UserIpMap.delete(ip.ip_id)
533 533 Session().commit()
534 534 # IP permissions are cached, need to wait for the cache in the server process to expire
535 535 time.sleep(1.5)
536 536
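# with the IP restriction removed, cloning must work again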
537 537 clone_url = vt.repo_url_param(webserver, vt.repo_name)
538 538 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir())
539 539
540 540 if vt.repo_type == 'git':
541 541 assert 'Cloning into' in stdout + stderr
542 542 assert stderr == '' or stdout == ''
543 543 elif vt.repo_type == 'hg':
544 544 assert 'requesting all changes' in stdout
545 545 assert 'adding changesets' in stdout
546 546 assert 'adding manifests' in stdout
547 547 assert 'adding file changes' in stdout
548 548
549 549 assert stderr == ''
550 550
551 551 @parametrize_vcs_test_hg # git hooks don't work like hg hooks
552 552 def test_custom_hooks_preoutgoing(self, testhook_cleanup, webserver, testfork, vt):
553 553 # set preoutgoing to failing hook (returns True)
554 554 Ui.create_or_update_hook('preoutgoing.testhook', 'python:kallithea.tests.fixture.failing_test_hook')
555 555 Session().commit()
556 556 # clone repo
557 557 clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS)
558 558 dest_dir = _get_tmp_dir()
559 559 stdout, stderr = Command(base.TESTS_TMP_PATH) \
560 560 .execute(vt.repo_type, 'clone', clone_url, dest_dir, ignoreReturnCode=True)
561 561 if vt.repo_type == 'hg':
562 562 assert 'preoutgoing.testhook hook failed' in stdout
563 563 elif vt.repo_type == 'git':
564 564 assert 'error: 406' in stderr
565 565
566 566 @parametrize_vcs_test_hg # git hooks don't work like hg hooks
567 567 def test_custom_hooks_prechangegroup(self, testhook_cleanup, webserver, testfork, vt):
568 568 # set prechangegroup to failing hook (a truthy result aborts the push)
569 569 Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.failing_test_hook')
570 570 Session().commit()
571 571 # clone repo
572 572 clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS)
573 573 dest_dir = _get_tmp_dir()
574 574 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
575 575
576 576 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, clone_url,
577 577 ignoreReturnCode=True)
578 578 assert 'failing_test_hook failed' in stdout + stderr
579 579 assert 'Traceback' not in stdout + stderr
580 580 assert 'prechangegroup.testhook hook failed' in stdout + stderr
581 581 # there are still outgoing changesets
582 582 stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url)
583 583 assert stdout != ''
584 584
585 585 # set prechangegroup hook to an exception-throwing method
586 586 Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.exception_test_hook')
587 587 Session().commit()
588 588 # retry the push
589 589 stdout, stderr = Command(dest_dir).execute('%s push' % vt.repo_type, clone_url, ignoreReturnCode=True)
590 590 if vt is HgHttpVcsTest:
591 591 # like with 'hg serve...' 'HTTP Error 500: INTERNAL SERVER ERROR' should be returned
592 592 assert 'HTTP Error 500: INTERNAL SERVER ERROR' in stderr
593 593 elif vt is HgSshVcsTest:
594 594 assert 'remote: Exception: exception_test_hook threw an exception' in stdout
595 595 else: assert False
596 596 # there are still outgoing changesets
597 597 stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url)
598 598 assert stdout != ''
599 599
600 600 # set prechangegroup hook to a passing method (returns False, i.e. success)
601 601 Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.passing_test_hook')
602 602 Session().commit()
603 603 # retry the push
604 604 stdout, stderr = Command(dest_dir).execute('%s push' % vt.repo_type, clone_url, ignoreReturnCode=True)
605 605 assert 'passing_test_hook succeeded' in stdout + stderr
606 606 assert 'Traceback' not in stdout + stderr
607 607 assert 'prechangegroup.testhook hook failed' not in stdout + stderr
608 608 # no more outgoing changesets
609 609 stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url)
610 610 assert stdout == ''
611 611 assert stderr == ''
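# For reference, a minimal Mercurial python hook of the shape these fixtures use
# (assumed sketch - the actual kallithea.tests.fixture implementations are not shown here):
#
#     def failing_test_hook(ui, repo, **kwargs):
#         ui.write('failing_test_hook failed\n')
#         return True   # a truthy result makes Mercurial abort the operation
#
#     def passing_test_hook(ui, repo, **kwargs):
#         ui.write('passing_test_hook succeeded\n')
#         return False  # a falsy result lets the operation proceed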
612 612
613 613 def test_add_submodule_git(self, webserver, testfork):
614 614 dest_dir = _get_tmp_dir()
615 615 clone_url = GitHttpVcsTest.repo_url_param(webserver, base.GIT_REPO)
616 616
617 617 fork_url = GitHttpVcsTest.repo_url_param(webserver, testfork['git'])
618 618
619 619 # add submodule
620 620 stdout, stderr = Command(base.TESTS_TMP_PATH).execute('git clone', fork_url, dest_dir)
621 621 stdout, stderr = Command(dest_dir).execute('git submodule add', clone_url, 'testsubmodule')
622 622 stdout, stderr = Command(dest_dir).execute('git commit -am "added testsubmodule pointing to', clone_url, '"', EMAIL=base.TEST_USER_ADMIN_EMAIL)
623 623 stdout, stderr = Command(dest_dir).execute('git push', fork_url, 'master')
624 624
625 625 # check for testsubmodule link in files page
626 626 self.log_user()
627 627 response = self.app.get(base.url(controller='files', action='index',
628 628 repo_name=testfork['git'],
629 629 revision='tip',
630 630 f_path='/'))
631 631 # check the _repo_files_url value that is used for AJAX reloading
632 632 response.mustcontain('var _repo_files_url = ("/%s/files/");' % testfork['git'])
633 633
634 634 response.mustcontain('<a class="submodule-dir" href="%s" target="_blank"><i class="icon-file-submodule"></i><span>testsubmodule @ ' % clone_url)
635 635
636 636 # check that following a submodule link actually works - and redirects
637 637 response = self.app.get(base.url(controller='files', action='index',
638 638 repo_name=testfork['git'],
639 639 revision='tip',
640 640 f_path='/testsubmodule'),
641 641 status=302)
642 642 assert response.location == clone_url