imports: always use CONFIG as kallithea.CONFIG

Author: Mads Kiilerich
Changeset: r8433:072c0352 (default branch)
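The change is mechanical but has a real motivation: `from kallithea import CONFIG` binds the name to whatever object `kallithea.CONFIG` happens to be at import time, while `import kallithea` plus `kallithea.CONFIG.get(...)` looks the attribute up on every access and therefore always sees the object currently installed on the module (for example one replaced during application setup). The snippet below is a minimal standalone sketch of that general Python behaviour; the module name `appmod` and the setting key are hypothetical stand-ins, not Kallithea code.

    # Illustrative only: shows why attribute access survives rebinding while a
    # from-import does not. "appmod" is a stand-in module, not part of Kallithea.
    import sys
    import types

    appmod = types.ModuleType('appmod')
    appmod.CONFIG = {}                     # starts out empty, like before app setup
    sys.modules['appmod'] = appmod

    import appmod                          # like "import kallithea"
    from appmod import CONFIG              # like "from kallithea import CONFIG"

    # Later, application setup replaces the CONFIG object wholesale.
    sys.modules['appmod'].CONFIG = {'rss_items_per_page': '50'}

    print(CONFIG.get('rss_items_per_page'))         # None - stale snapshot from import time
    print(appmod.CONFIG.get('rss_items_per_page'))  # '50' - attribute access sees the new object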
kallithea/controllers/feed.py
@@ -1,134 +1,134 @@
 # -*- coding: utf-8 -*-
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 kallithea.controllers.feed
 ~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 Feed controller for Kallithea
 
 This file was forked by the Kallithea project in July 2014.
 Original author and date, and relevant copyright and licensing information is below:
 :created_on: Apr 23, 2010
 :author: marcink
 :copyright: (c) 2013 RhodeCode GmbH, and others.
 :license: GPLv3, see LICENSE.md for more details.
 """
 
 
 import logging
 
 from beaker.cache import cache_region
 from tg import response
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
 
-from kallithea import CONFIG
+import kallithea
 from kallithea.lib import feeds
 from kallithea.lib import helpers as h
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
 from kallithea.lib.base import BaseRepoController
 from kallithea.lib.diffs import DiffProcessor
 from kallithea.lib.utils2 import asbool, safe_int, safe_str
 
 
 log = logging.getLogger(__name__)
 
 
 class FeedController(BaseRepoController):
 
     @LoginRequired(allow_default_user=True)
     @HasRepoPermissionLevelDecorator('read')
     def _before(self, *args, **kwargs):
         super(FeedController, self)._before(*args, **kwargs)
 
     def _get_title(self, cs):
         return h.shorter(cs.message, 160)
 
     def __get_desc(self, cs):
         desc_msg = [(_('%s committed on %s')
                      % (h.person(cs.author), h.fmt_date(cs.date))) + '<br/>']
         # branches, tags, bookmarks
         for branch in cs.branches:
             desc_msg.append('branch: %s<br/>' % branch)
         for book in cs.bookmarks:
             desc_msg.append('bookmark: %s<br/>' % book)
         for tag in cs.tags:
             desc_msg.append('tag: %s<br/>' % tag)
 
         changes = []
-        diff_limit = safe_int(CONFIG.get('rss_cut_off_limit', 32 * 1024))
+        diff_limit = safe_int(kallithea.CONFIG.get('rss_cut_off_limit', 32 * 1024))
         raw_diff = cs.diff()
         diff_processor = DiffProcessor(raw_diff,
                                        diff_limit=diff_limit,
                                        inline_diff=False)
 
         for st in diff_processor.parsed:
             st.update({'added': st['stats']['added'],
                        'removed': st['stats']['deleted']})
             changes.append('\n %(operation)s %(filename)s '
                            '(%(added)s lines added, %(removed)s lines removed)'
                            % st)
         if diff_processor.limited_diff:
             changes = changes + ['\n ' +
                                  _('Changeset was too big and was cut off...')]
 
         # rev link
         _url = h.canonical_url('changeset_home', repo_name=c.db_repo.repo_name,
                                revision=cs.raw_id)
         desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))
 
         desc_msg.append('<pre>')
         desc_msg.append(h.urlify_text(cs.message))
         desc_msg.append('\n')
         desc_msg.extend(changes)
-        if asbool(CONFIG.get('rss_include_diff', False)):
+        if asbool(kallithea.CONFIG.get('rss_include_diff', False)):
             desc_msg.append('\n\n')
             desc_msg.append(safe_str(raw_diff))
         desc_msg.append('</pre>')
         return desc_msg
 
     def _feed(self, repo_name, feeder):
         """Produce a simple feed"""
 
         @cache_region('long_term_file', '_get_feed_from_cache')
         def _get_feed_from_cache(*_cache_keys): # parameters are not really used - only as caching key
             header = dict(
                 title=_('%s %s feed') % (c.site_name, repo_name),
                 link=h.canonical_url('summary_home', repo_name=repo_name),
                 description=_('Changes on %s repository') % repo_name,
             )
 
-            rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
+            rss_items_per_page = safe_int(kallithea.CONFIG.get('rss_items_per_page', 20))
             entries=[]
             for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
                 entries.append(dict(
                     title=self._get_title(cs),
                     link=h.canonical_url('changeset_home', repo_name=repo_name, revision=cs.raw_id),
                     author_email=cs.author_email,
                     author_name=cs.author_name,
                     description=''.join(self.__get_desc(cs)),
                     pubdate=cs.date,
                 ))
             return feeder.render(header, entries)
 
         response.content_type = feeder.content_type
         return _get_feed_from_cache(repo_name, feeder.__name__)
 
     def atom(self, repo_name):
         """Produce a simple atom-1.0 feed"""
         return self._feed(repo_name, feeds.AtomFeed)
 
     def rss(self, repo_name):
         """Produce a simple rss2 feed"""
         return self._feed(repo_name, feeds.RssFeed)
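`FeedController._feed()` wraps the feed rendering in Beaker's region cache; the positional arguments of `_get_feed_from_cache()` are never used in its body and serve only as the cache key. Below is a minimal standalone sketch of that decorator pattern, assuming an in-memory region configured in code; in Kallithea the `long_term_file` region and its expiry come from the .ini configuration, so the values here are only placeholders.

    # Sketch of the beaker.cache.cache_region pattern used by _get_feed_from_cache().
    from beaker.cache import cache_region, cache_regions

    # Region settings are assumptions for the example; Kallithea reads them from its .ini file.
    cache_regions.update({
        'long_term_file': {'type': 'memory', 'expire': 60 * 60 * 24},
    })

    @cache_region('long_term_file', '_get_feed_from_cache')
    def _get_feed_from_cache(*_cache_keys):  # the arguments only form the cache key
        print('rendering feed for %r' % (_cache_keys,))
        return '<feed/>'

    _get_feed_from_cache('myrepo', 'AtomFeed')  # computes and caches
    _get_feed_from_cache('myrepo', 'AtomFeed')  # second call is served from the cache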
kallithea/controllers/files.py
@@ -1,746 +1,746 @@
 # -*- coding: utf-8 -*-
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 kallithea.controllers.files
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 Files controller for Kallithea
 
 This file was forked by the Kallithea project in July 2014.
 Original author and date, and relevant copyright and licensing information is below:
 :created_on: Apr 21, 2010
 :author: marcink
 :copyright: (c) 2013 RhodeCode GmbH, and others.
 :license: GPLv3, see LICENSE.md for more details.
 """
 
 import logging
 import os
 import posixpath
 import shutil
 import tempfile
 import traceback
 from collections import OrderedDict
 
 from tg import request, response
 from tg import tmpl_context as c
 from tg.i18n import ugettext as _
 from webob.exc import HTTPFound, HTTPNotFound
 
+import kallithea
 from kallithea.config.routing import url
 from kallithea.lib import diffs
 from kallithea.lib import helpers as h
 from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
 from kallithea.lib.base import BaseRepoController, jsonify, render
 from kallithea.lib.exceptions import NonRelativePathError
 from kallithea.lib.utils import action_logger
 from kallithea.lib.utils2 import asbool, convert_line_endings, detect_mode, safe_str
 from kallithea.lib.vcs.backends.base import EmptyChangeset
 from kallithea.lib.vcs.conf import settings
 from kallithea.lib.vcs.exceptions import (ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, ImproperArchiveTypeError, NodeAlreadyExistsError,
                                            NodeDoesNotExistError, NodeError, RepositoryError, VCSError)
 from kallithea.lib.vcs.nodes import FileNode
 from kallithea.model import db
 from kallithea.model.repo import RepoModel
 from kallithea.model.scm import ScmModel
 
 
 log = logging.getLogger(__name__)
 
 
62 class FilesController(BaseRepoController):
63 class FilesController(BaseRepoController):
63
64
64 def _before(self, *args, **kwargs):
65 def _before(self, *args, **kwargs):
65 super(FilesController, self)._before(*args, **kwargs)
66 super(FilesController, self)._before(*args, **kwargs)
66
67
67 def __get_cs(self, rev, silent_empty=False):
68 def __get_cs(self, rev, silent_empty=False):
68 """
69 """
69 Safe way to get changeset if error occur it redirects to tip with
70 Safe way to get changeset if error occur it redirects to tip with
70 proper message
71 proper message
71
72
72 :param rev: revision to fetch
73 :param rev: revision to fetch
73 :silent_empty: return None if repository is empty
74 :silent_empty: return None if repository is empty
74 """
75 """
75
76
76 try:
77 try:
77 return c.db_repo_scm_instance.get_changeset(rev)
78 return c.db_repo_scm_instance.get_changeset(rev)
78 except EmptyRepositoryError as e:
79 except EmptyRepositoryError as e:
79 if silent_empty:
80 if silent_empty:
80 return None
81 return None
81 url_ = url('files_add_home',
82 url_ = url('files_add_home',
82 repo_name=c.repo_name,
83 repo_name=c.repo_name,
83 revision=0, f_path='', anchor='edit')
84 revision=0, f_path='', anchor='edit')
84 add_new = h.link_to(_('Click here to add new file'), url_, class_="alert-link")
85 add_new = h.link_to(_('Click here to add new file'), url_, class_="alert-link")
85 h.flash(_('There are no files yet.') + ' ' + add_new, category='warning')
86 h.flash(_('There are no files yet.') + ' ' + add_new, category='warning')
86 raise HTTPNotFound()
87 raise HTTPNotFound()
87 except (ChangesetDoesNotExistError, LookupError):
88 except (ChangesetDoesNotExistError, LookupError):
88 msg = _('Such revision does not exist for this repository')
89 msg = _('Such revision does not exist for this repository')
89 h.flash(msg, category='error')
90 h.flash(msg, category='error')
90 raise HTTPNotFound()
91 raise HTTPNotFound()
91 except RepositoryError as e:
92 except RepositoryError as e:
92 h.flash(e, category='error')
93 h.flash(e, category='error')
93 raise HTTPNotFound()
94 raise HTTPNotFound()
94
95
95 def __get_filenode(self, cs, path):
96 def __get_filenode(self, cs, path):
96 """
97 """
97 Returns file_node or raise HTTP error.
98 Returns file_node or raise HTTP error.
98
99
99 :param cs: given changeset
100 :param cs: given changeset
100 :param path: path to lookup
101 :param path: path to lookup
101 """
102 """
102
103
103 try:
104 try:
104 file_node = cs.get_node(path)
105 file_node = cs.get_node(path)
105 if file_node.is_dir():
106 if file_node.is_dir():
106 raise RepositoryError('given path is a directory')
107 raise RepositoryError('given path is a directory')
107 except ChangesetDoesNotExistError:
108 except ChangesetDoesNotExistError:
108 msg = _('Such revision does not exist for this repository')
109 msg = _('Such revision does not exist for this repository')
109 h.flash(msg, category='error')
110 h.flash(msg, category='error')
110 raise HTTPNotFound()
111 raise HTTPNotFound()
111 except RepositoryError as e:
112 except RepositoryError as e:
112 h.flash(e, category='error')
113 h.flash(e, category='error')
113 raise HTTPNotFound()
114 raise HTTPNotFound()
114
115
115 return file_node
116 return file_node
116
117
117 @LoginRequired(allow_default_user=True)
118 @LoginRequired(allow_default_user=True)
118 @HasRepoPermissionLevelDecorator('read')
119 @HasRepoPermissionLevelDecorator('read')
119 def index(self, repo_name, revision, f_path, annotate=False):
120 def index(self, repo_name, revision, f_path, annotate=False):
120 # redirect to given revision from form if given
121 # redirect to given revision from form if given
121 post_revision = request.POST.get('at_rev', None)
122 post_revision = request.POST.get('at_rev', None)
122 if post_revision:
123 if post_revision:
123 cs = self.__get_cs(post_revision) # FIXME - unused!
124 cs = self.__get_cs(post_revision) # FIXME - unused!
124
125
125 c.revision = revision
126 c.revision = revision
126 c.changeset = self.__get_cs(revision)
127 c.changeset = self.__get_cs(revision)
127 c.branch = request.GET.get('branch', None)
128 c.branch = request.GET.get('branch', None)
128 c.f_path = f_path
129 c.f_path = f_path
129 c.annotate = annotate
130 c.annotate = annotate
130 cur_rev = c.changeset.revision
131 cur_rev = c.changeset.revision
131 # used in files_source.html:
132 # used in files_source.html:
132 c.cut_off_limit = self.cut_off_limit
133 c.cut_off_limit = self.cut_off_limit
133 c.fulldiff = request.GET.get('fulldiff')
134 c.fulldiff = request.GET.get('fulldiff')
134
135
135 # prev link
136 # prev link
136 try:
137 try:
137 prev_rev = c.db_repo_scm_instance.get_changeset(cur_rev).prev(c.branch)
138 prev_rev = c.db_repo_scm_instance.get_changeset(cur_rev).prev(c.branch)
138 c.url_prev = url('files_home', repo_name=c.repo_name,
139 c.url_prev = url('files_home', repo_name=c.repo_name,
139 revision=prev_rev.raw_id, f_path=f_path)
140 revision=prev_rev.raw_id, f_path=f_path)
140 if c.branch:
141 if c.branch:
141 c.url_prev += '?branch=%s' % c.branch
142 c.url_prev += '?branch=%s' % c.branch
142 except (ChangesetDoesNotExistError, VCSError):
143 except (ChangesetDoesNotExistError, VCSError):
143 c.url_prev = '#'
144 c.url_prev = '#'
144
145
145 # next link
146 # next link
146 try:
147 try:
147 next_rev = c.db_repo_scm_instance.get_changeset(cur_rev).next(c.branch)
148 next_rev = c.db_repo_scm_instance.get_changeset(cur_rev).next(c.branch)
148 c.url_next = url('files_home', repo_name=c.repo_name,
149 c.url_next = url('files_home', repo_name=c.repo_name,
149 revision=next_rev.raw_id, f_path=f_path)
150 revision=next_rev.raw_id, f_path=f_path)
150 if c.branch:
151 if c.branch:
151 c.url_next += '?branch=%s' % c.branch
152 c.url_next += '?branch=%s' % c.branch
152 except (ChangesetDoesNotExistError, VCSError):
153 except (ChangesetDoesNotExistError, VCSError):
153 c.url_next = '#'
154 c.url_next = '#'
154
155
155 # files or dirs
156 # files or dirs
156 try:
157 try:
157 c.file = c.changeset.get_node(f_path)
158 c.file = c.changeset.get_node(f_path)
158
159
159 if c.file.is_submodule():
160 if c.file.is_submodule():
160 raise HTTPFound(location=c.file.url)
161 raise HTTPFound(location=c.file.url)
161 elif c.file.is_file():
162 elif c.file.is_file():
162 c.load_full_history = False
163 c.load_full_history = False
163 # determine if we're on branch head
164 # determine if we're on branch head
164 _branches = c.db_repo_scm_instance.branches
165 _branches = c.db_repo_scm_instance.branches
165 c.on_branch_head = revision in _branches or revision in _branches.values()
166 c.on_branch_head = revision in _branches or revision in _branches.values()
166 _hist = []
167 _hist = []
167 c.file_history = []
168 c.file_history = []
168 if c.load_full_history:
169 if c.load_full_history:
169 c.file_history, _hist = self._get_node_history(c.changeset, f_path)
170 c.file_history, _hist = self._get_node_history(c.changeset, f_path)
170
171
171 c.authors = []
172 c.authors = []
172 for a in set([x.author for x in _hist]):
173 for a in set([x.author for x in _hist]):
173 c.authors.append((h.email(a), h.person(a)))
174 c.authors.append((h.email(a), h.person(a)))
174 else:
175 else:
175 c.authors = c.file_history = []
176 c.authors = c.file_history = []
176 except RepositoryError as e:
177 except RepositoryError as e:
177 h.flash(e, category='error')
178 h.flash(e, category='error')
178 raise HTTPNotFound()
179 raise HTTPNotFound()
179
180
180 if request.environ.get('HTTP_X_PARTIAL_XHR'):
181 if request.environ.get('HTTP_X_PARTIAL_XHR'):
181 return render('files/files_ypjax.html')
182 return render('files/files_ypjax.html')
182
183
183 # TODO: tags and bookmarks?
184 # TODO: tags and bookmarks?
184 c.revision_options = [(c.changeset.raw_id,
185 c.revision_options = [(c.changeset.raw_id,
185 _('%s at %s') % (b, h.short_id(c.changeset.raw_id))) for b in c.changeset.branches] + \
186 _('%s at %s') % (b, h.short_id(c.changeset.raw_id))) for b in c.changeset.branches] + \
186 [(n, b) for b, n in c.db_repo_scm_instance.branches.items()]
187 [(n, b) for b, n in c.db_repo_scm_instance.branches.items()]
187 if c.db_repo_scm_instance.closed_branches:
188 if c.db_repo_scm_instance.closed_branches:
188 prefix = _('(closed)') + ' '
189 prefix = _('(closed)') + ' '
189 c.revision_options += [('-', '-')] + \
190 c.revision_options += [('-', '-')] + \
190 [(n, prefix + b) for b, n in c.db_repo_scm_instance.closed_branches.items()]
191 [(n, prefix + b) for b, n in c.db_repo_scm_instance.closed_branches.items()]
191
192
192 return render('files/files.html')
193 return render('files/files.html')
193
194
194 @LoginRequired(allow_default_user=True)
195 @LoginRequired(allow_default_user=True)
195 @HasRepoPermissionLevelDecorator('read')
196 @HasRepoPermissionLevelDecorator('read')
196 @jsonify
197 @jsonify
197 def history(self, repo_name, revision, f_path):
198 def history(self, repo_name, revision, f_path):
198 changeset = self.__get_cs(revision)
199 changeset = self.__get_cs(revision)
199 _file = changeset.get_node(f_path)
200 _file = changeset.get_node(f_path)
200 if _file.is_file():
201 if _file.is_file():
201 file_history, _hist = self._get_node_history(changeset, f_path)
202 file_history, _hist = self._get_node_history(changeset, f_path)
202
203
203 res = []
204 res = []
204 for obj in file_history:
205 for obj in file_history:
205 res.append({
206 res.append({
206 'text': obj[1],
207 'text': obj[1],
207 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
208 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
208 })
209 })
209
210
210 data = {
211 data = {
211 'more': False,
212 'more': False,
212 'results': res
213 'results': res
213 }
214 }
214 return data
215 return data
215
216
216 @LoginRequired(allow_default_user=True)
217 @LoginRequired(allow_default_user=True)
217 @HasRepoPermissionLevelDecorator('read')
218 @HasRepoPermissionLevelDecorator('read')
218 def authors(self, repo_name, revision, f_path):
219 def authors(self, repo_name, revision, f_path):
219 changeset = self.__get_cs(revision)
220 changeset = self.__get_cs(revision)
220 _file = changeset.get_node(f_path)
221 _file = changeset.get_node(f_path)
221 if _file.is_file():
222 if _file.is_file():
222 file_history, _hist = self._get_node_history(changeset, f_path)
223 file_history, _hist = self._get_node_history(changeset, f_path)
223 c.authors = []
224 c.authors = []
224 for a in set([x.author for x in _hist]):
225 for a in set([x.author for x in _hist]):
225 c.authors.append((h.email(a), h.person(a)))
226 c.authors.append((h.email(a), h.person(a)))
226 return render('files/files_history_box.html')
227 return render('files/files_history_box.html')
227
228
228 @LoginRequired(allow_default_user=True)
229 @LoginRequired(allow_default_user=True)
229 @HasRepoPermissionLevelDecorator('read')
230 @HasRepoPermissionLevelDecorator('read')
230 def rawfile(self, repo_name, revision, f_path):
231 def rawfile(self, repo_name, revision, f_path):
231 cs = self.__get_cs(revision)
232 cs = self.__get_cs(revision)
232 file_node = self.__get_filenode(cs, f_path)
233 file_node = self.__get_filenode(cs, f_path)
233
234
234 response.content_disposition = \
235 response.content_disposition = \
235 'attachment; filename=%s' % f_path.split(db.URL_SEP)[-1]
236 'attachment; filename=%s' % f_path.split(db.URL_SEP)[-1]
236
237
237 response.content_type = file_node.mimetype
238 response.content_type = file_node.mimetype
238 return file_node.content
239 return file_node.content
239
240
240 @LoginRequired(allow_default_user=True)
241 @LoginRequired(allow_default_user=True)
241 @HasRepoPermissionLevelDecorator('read')
242 @HasRepoPermissionLevelDecorator('read')
242 def raw(self, repo_name, revision, f_path):
243 def raw(self, repo_name, revision, f_path):
243 cs = self.__get_cs(revision)
244 cs = self.__get_cs(revision)
244 file_node = self.__get_filenode(cs, f_path)
245 file_node = self.__get_filenode(cs, f_path)
245
246
246 raw_mimetype_mapping = {
247 raw_mimetype_mapping = {
247 # map original mimetype to a mimetype used for "show as raw"
248 # map original mimetype to a mimetype used for "show as raw"
248 # you can also provide a content-disposition to override the
249 # you can also provide a content-disposition to override the
249 # default "attachment" disposition.
250 # default "attachment" disposition.
250 # orig_type: (new_type, new_dispo)
251 # orig_type: (new_type, new_dispo)
251
252
252 # show images inline:
253 # show images inline:
253 'image/x-icon': ('image/x-icon', 'inline'),
254 'image/x-icon': ('image/x-icon', 'inline'),
254 'image/png': ('image/png', 'inline'),
255 'image/png': ('image/png', 'inline'),
255 'image/gif': ('image/gif', 'inline'),
256 'image/gif': ('image/gif', 'inline'),
256 'image/jpeg': ('image/jpeg', 'inline'),
257 'image/jpeg': ('image/jpeg', 'inline'),
257 'image/svg+xml': ('image/svg+xml', 'inline'),
258 'image/svg+xml': ('image/svg+xml', 'inline'),
258 }
259 }
259
260
260 mimetype = file_node.mimetype
261 mimetype = file_node.mimetype
261 try:
262 try:
262 mimetype, dispo = raw_mimetype_mapping[mimetype]
263 mimetype, dispo = raw_mimetype_mapping[mimetype]
263 except KeyError:
264 except KeyError:
264 # we don't know anything special about this, handle it safely
265 # we don't know anything special about this, handle it safely
265 if file_node.is_binary:
266 if file_node.is_binary:
266 # do same as download raw for binary files
267 # do same as download raw for binary files
267 mimetype, dispo = 'application/octet-stream', 'attachment'
268 mimetype, dispo = 'application/octet-stream', 'attachment'
268 else:
269 else:
269 # do not just use the original mimetype, but force text/plain,
270 # do not just use the original mimetype, but force text/plain,
270 # otherwise it would serve text/html and that might be unsafe.
271 # otherwise it would serve text/html and that might be unsafe.
271 # Note: underlying vcs library fakes text/plain mimetype if the
272 # Note: underlying vcs library fakes text/plain mimetype if the
272 # mimetype can not be determined and it thinks it is not
273 # mimetype can not be determined and it thinks it is not
273 # binary.This might lead to erroneous text display in some
274 # binary.This might lead to erroneous text display in some
274 # cases, but helps in other cases, like with text files
275 # cases, but helps in other cases, like with text files
275 # without extension.
276 # without extension.
276 mimetype, dispo = 'text/plain', 'inline'
277 mimetype, dispo = 'text/plain', 'inline'
277
278
278 if dispo == 'attachment':
279 if dispo == 'attachment':
279 dispo = 'attachment; filename=%s' % f_path.split(os.sep)[-1]
280 dispo = 'attachment; filename=%s' % f_path.split(os.sep)[-1]
280
281
281 response.content_disposition = dispo
282 response.content_disposition = dispo
282 response.content_type = mimetype
283 response.content_type = mimetype
283 return file_node.content
284 return file_node.content
284
285
285 @LoginRequired()
286 @LoginRequired()
286 @HasRepoPermissionLevelDecorator('write')
287 @HasRepoPermissionLevelDecorator('write')
287 def delete(self, repo_name, revision, f_path):
288 def delete(self, repo_name, revision, f_path):
288 repo = c.db_repo
289 repo = c.db_repo
289 # check if revision is a branch identifier- basically we cannot
290 # check if revision is a branch identifier- basically we cannot
290 # create multiple heads via file editing
291 # create multiple heads via file editing
291 _branches = repo.scm_instance.branches
292 _branches = repo.scm_instance.branches
292 # check if revision is a branch name or branch hash
293 # check if revision is a branch name or branch hash
293 if revision not in _branches and revision not in _branches.values():
294 if revision not in _branches and revision not in _branches.values():
294 h.flash(_('You can only delete files with revision '
295 h.flash(_('You can only delete files with revision '
295 'being a valid branch'), category='warning')
296 'being a valid branch'), category='warning')
296 raise HTTPFound(location=h.url('files_home',
297 raise HTTPFound(location=h.url('files_home',
297 repo_name=repo_name, revision='tip',
298 repo_name=repo_name, revision='tip',
298 f_path=f_path))
299 f_path=f_path))
299
300
300 r_post = request.POST
301 r_post = request.POST
301
302
302 c.cs = self.__get_cs(revision)
303 c.cs = self.__get_cs(revision)
303 c.file = self.__get_filenode(c.cs, f_path)
304 c.file = self.__get_filenode(c.cs, f_path)
304
305
305 c.default_message = _('Deleted file %s via Kallithea') % (f_path)
306 c.default_message = _('Deleted file %s via Kallithea') % (f_path)
306 c.f_path = f_path
307 c.f_path = f_path
307 node_path = f_path
308 node_path = f_path
308 author = request.authuser.full_contact
309 author = request.authuser.full_contact
309
310
310 if r_post:
311 if r_post:
311 message = r_post.get('message') or c.default_message
312 message = r_post.get('message') or c.default_message
312
313
313 try:
314 try:
314 nodes = {
315 nodes = {
315 node_path: {
316 node_path: {
316 'content': ''
317 'content': ''
317 }
318 }
318 }
319 }
319 self.scm_model.delete_nodes(
320 self.scm_model.delete_nodes(
320 user=request.authuser.user_id,
321 user=request.authuser.user_id,
321 ip_addr=request.ip_addr,
322 ip_addr=request.ip_addr,
322 repo=c.db_repo,
323 repo=c.db_repo,
323 message=message,
324 message=message,
324 nodes=nodes,
325 nodes=nodes,
325 parent_cs=c.cs,
326 parent_cs=c.cs,
326 author=author,
327 author=author,
327 )
328 )
328
329
329 h.flash(_('Successfully deleted file %s') % f_path,
330 h.flash(_('Successfully deleted file %s') % f_path,
330 category='success')
331 category='success')
331 except Exception:
332 except Exception:
332 log.error(traceback.format_exc())
333 log.error(traceback.format_exc())
333 h.flash(_('Error occurred during commit'), category='error')
334 h.flash(_('Error occurred during commit'), category='error')
334 raise HTTPFound(location=url('changeset_home',
335 raise HTTPFound(location=url('changeset_home',
335 repo_name=c.repo_name, revision='tip'))
336 repo_name=c.repo_name, revision='tip'))
336
337
337 return render('files/files_delete.html')
338 return render('files/files_delete.html')
338
339
339 @LoginRequired()
340 @LoginRequired()
340 @HasRepoPermissionLevelDecorator('write')
341 @HasRepoPermissionLevelDecorator('write')
341 def edit(self, repo_name, revision, f_path):
342 def edit(self, repo_name, revision, f_path):
342 repo = c.db_repo
343 repo = c.db_repo
343 # check if revision is a branch identifier- basically we cannot
344 # check if revision is a branch identifier- basically we cannot
344 # create multiple heads via file editing
345 # create multiple heads via file editing
345 _branches = repo.scm_instance.branches
346 _branches = repo.scm_instance.branches
346 # check if revision is a branch name or branch hash
347 # check if revision is a branch name or branch hash
347 if revision not in _branches and revision not in _branches.values():
348 if revision not in _branches and revision not in _branches.values():
348 h.flash(_('You can only edit files with revision '
349 h.flash(_('You can only edit files with revision '
349 'being a valid branch'), category='warning')
350 'being a valid branch'), category='warning')
350 raise HTTPFound(location=h.url('files_home',
351 raise HTTPFound(location=h.url('files_home',
351 repo_name=repo_name, revision='tip',
352 repo_name=repo_name, revision='tip',
352 f_path=f_path))
353 f_path=f_path))
353
354
354 r_post = request.POST
355 r_post = request.POST
355
356
356 c.cs = self.__get_cs(revision)
357 c.cs = self.__get_cs(revision)
357 c.file = self.__get_filenode(c.cs, f_path)
358 c.file = self.__get_filenode(c.cs, f_path)
358
359
359 if c.file.is_binary:
360 if c.file.is_binary:
360 raise HTTPFound(location=url('files_home', repo_name=c.repo_name,
361 raise HTTPFound(location=url('files_home', repo_name=c.repo_name,
361 revision=c.cs.raw_id, f_path=f_path))
362 revision=c.cs.raw_id, f_path=f_path))
362 c.default_message = _('Edited file %s via Kallithea') % (f_path)
363 c.default_message = _('Edited file %s via Kallithea') % (f_path)
363 c.f_path = f_path
364 c.f_path = f_path
364
365
365 if r_post:
366 if r_post:
366 old_content = safe_str(c.file.content)
367 old_content = safe_str(c.file.content)
367 sl = old_content.splitlines(1)
368 sl = old_content.splitlines(1)
368 first_line = sl[0] if sl else ''
369 first_line = sl[0] if sl else ''
369 # modes: 0 - Unix, 1 - Mac, 2 - DOS
370 # modes: 0 - Unix, 1 - Mac, 2 - DOS
370 mode = detect_mode(first_line, 0)
371 mode = detect_mode(first_line, 0)
371 content = convert_line_endings(r_post.get('content', ''), mode)
372 content = convert_line_endings(r_post.get('content', ''), mode)
372
373
373 message = r_post.get('message') or c.default_message
374 message = r_post.get('message') or c.default_message
374 author = request.authuser.full_contact
375 author = request.authuser.full_contact
375
376
376 if content == old_content:
377 if content == old_content:
377 h.flash(_('No changes'), category='warning')
378 h.flash(_('No changes'), category='warning')
378 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
379 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
379 revision='tip'))
380 revision='tip'))
380 try:
381 try:
381 self.scm_model.commit_change(repo=c.db_repo_scm_instance,
382 self.scm_model.commit_change(repo=c.db_repo_scm_instance,
382 repo_name=repo_name, cs=c.cs,
383 repo_name=repo_name, cs=c.cs,
383 user=request.authuser.user_id,
384 user=request.authuser.user_id,
384 ip_addr=request.ip_addr,
385 ip_addr=request.ip_addr,
385 author=author, message=message,
386 author=author, message=message,
386 content=content, f_path=f_path)
387 content=content, f_path=f_path)
387 h.flash(_('Successfully committed to %s') % f_path,
388 h.flash(_('Successfully committed to %s') % f_path,
388 category='success')
389 category='success')
389 except Exception:
390 except Exception:
390 log.error(traceback.format_exc())
391 log.error(traceback.format_exc())
391 h.flash(_('Error occurred during commit'), category='error')
392 h.flash(_('Error occurred during commit'), category='error')
392 raise HTTPFound(location=url('changeset_home',
393 raise HTTPFound(location=url('changeset_home',
393 repo_name=c.repo_name, revision='tip'))
394 repo_name=c.repo_name, revision='tip'))
394
395
395 return render('files/files_edit.html')
396 return render('files/files_edit.html')
396
397
397 @LoginRequired()
398 @LoginRequired()
398 @HasRepoPermissionLevelDecorator('write')
399 @HasRepoPermissionLevelDecorator('write')
399 def add(self, repo_name, revision, f_path):
400 def add(self, repo_name, revision, f_path):
400
401
401 repo = c.db_repo
402 repo = c.db_repo
402 r_post = request.POST
403 r_post = request.POST
403 c.cs = self.__get_cs(revision, silent_empty=True)
404 c.cs = self.__get_cs(revision, silent_empty=True)
404 if c.cs is None:
405 if c.cs is None:
405 c.cs = EmptyChangeset(alias=c.db_repo_scm_instance.alias)
406 c.cs = EmptyChangeset(alias=c.db_repo_scm_instance.alias)
406 c.default_message = (_('Added file via Kallithea'))
407 c.default_message = (_('Added file via Kallithea'))
407 c.f_path = f_path
408 c.f_path = f_path
408
409
409 if r_post:
410 if r_post:
410 unix_mode = 0
411 unix_mode = 0
411 content = convert_line_endings(r_post.get('content', ''), unix_mode)
412 content = convert_line_endings(r_post.get('content', ''), unix_mode)
412
413
413 message = r_post.get('message') or c.default_message
414 message = r_post.get('message') or c.default_message
414 filename = r_post.get('filename')
415 filename = r_post.get('filename')
415 location = r_post.get('location', '')
416 location = r_post.get('location', '')
416 file_obj = r_post.get('upload_file', None)
417 file_obj = r_post.get('upload_file', None)
417
418
418 if file_obj is not None and hasattr(file_obj, 'filename'):
419 if file_obj is not None and hasattr(file_obj, 'filename'):
419 filename = file_obj.filename
420 filename = file_obj.filename
420 content = file_obj.file
421 content = file_obj.file
421
422
422 if hasattr(content, 'file'):
423 if hasattr(content, 'file'):
423 # non posix systems store real file under file attr
424 # non posix systems store real file under file attr
424 content = content.file
425 content = content.file
425
426
426 if not content:
427 if not content:
427 h.flash(_('No content'), category='warning')
428 h.flash(_('No content'), category='warning')
428 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
429 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
429 revision='tip'))
430 revision='tip'))
430 if not filename:
431 if not filename:
431 h.flash(_('No filename'), category='warning')
432 h.flash(_('No filename'), category='warning')
432 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
433 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
433 revision='tip'))
434 revision='tip'))
434 # strip all crap out of file, just leave the basename
435 # strip all crap out of file, just leave the basename
435 filename = os.path.basename(filename)
436 filename = os.path.basename(filename)
436 node_path = posixpath.join(location, filename)
437 node_path = posixpath.join(location, filename)
437 author = request.authuser.full_contact
438 author = request.authuser.full_contact
438
439
439 try:
440 try:
440 nodes = {
441 nodes = {
441 node_path: {
442 node_path: {
442 'content': content
443 'content': content
443 }
444 }
444 }
445 }
445 self.scm_model.create_nodes(
446 self.scm_model.create_nodes(
446 user=request.authuser.user_id,
447 user=request.authuser.user_id,
447 ip_addr=request.ip_addr,
448 ip_addr=request.ip_addr,
448 repo=c.db_repo,
449 repo=c.db_repo,
449 message=message,
450 message=message,
450 nodes=nodes,
451 nodes=nodes,
451 parent_cs=c.cs,
452 parent_cs=c.cs,
452 author=author,
453 author=author,
453 )
454 )
454
455
455 h.flash(_('Successfully committed to %s') % node_path,
456 h.flash(_('Successfully committed to %s') % node_path,
456 category='success')
457 category='success')
457 except NonRelativePathError as e:
458 except NonRelativePathError as e:
458 h.flash(_('Location must be relative path and must not '
459 h.flash(_('Location must be relative path and must not '
459 'contain .. in path'), category='warning')
460 'contain .. in path'), category='warning')
460 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
461 raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
461 revision='tip'))
462 revision='tip'))
462 except (NodeError, NodeAlreadyExistsError) as e:
463 except (NodeError, NodeAlreadyExistsError) as e:
463 h.flash(_(e), category='error')
464 h.flash(_(e), category='error')
464 except Exception:
465 except Exception:
465 log.error(traceback.format_exc())
466 log.error(traceback.format_exc())
466 h.flash(_('Error occurred during commit'), category='error')
467 h.flash(_('Error occurred during commit'), category='error')
467 raise HTTPFound(location=url('changeset_home',
468 raise HTTPFound(location=url('changeset_home',
468 repo_name=c.repo_name, revision='tip'))
469 repo_name=c.repo_name, revision='tip'))
469
470
470 return render('files/files_add.html')
471 return render('files/files_add.html')
471
472
472 @LoginRequired(allow_default_user=True)
473 @LoginRequired(allow_default_user=True)
473 @HasRepoPermissionLevelDecorator('read')
474 @HasRepoPermissionLevelDecorator('read')
474 def archivefile(self, repo_name, fname):
475 def archivefile(self, repo_name, fname):
475 fileformat = None
476 fileformat = None
476 revision = None
477 revision = None
477 ext = None
478 ext = None
478 subrepos = request.GET.get('subrepos') == 'true'
479 subrepos = request.GET.get('subrepos') == 'true'
479
480
480 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
481 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
481 archive_spec = fname.split(ext_data[1])
482 archive_spec = fname.split(ext_data[1])
482 if len(archive_spec) == 2 and archive_spec[1] == '':
483 if len(archive_spec) == 2 and archive_spec[1] == '':
483 fileformat = a_type or ext_data[1]
484 fileformat = a_type or ext_data[1]
484 revision = archive_spec[0]
485 revision = archive_spec[0]
485 ext = ext_data[1]
486 ext = ext_data[1]
486
487
487 try:
488 try:
488 dbrepo = RepoModel().get_by_repo_name(repo_name)
489 dbrepo = RepoModel().get_by_repo_name(repo_name)
489 if not dbrepo.enable_downloads:
490 if not dbrepo.enable_downloads:
490 return _('Downloads disabled') # TODO: do something else?
491 return _('Downloads disabled') # TODO: do something else?
491
492
492 if c.db_repo_scm_instance.alias == 'hg':
493 if c.db_repo_scm_instance.alias == 'hg':
493 # patch and reset hooks section of UI config to not run any
494 # patch and reset hooks section of UI config to not run any
494 # hooks on fetching archives with subrepos
495 # hooks on fetching archives with subrepos
495 for k, v in c.db_repo_scm_instance._repo.ui.configitems('hooks'):
496 for k, v in c.db_repo_scm_instance._repo.ui.configitems('hooks'):
496 c.db_repo_scm_instance._repo.ui.setconfig('hooks', k, None)
497 c.db_repo_scm_instance._repo.ui.setconfig('hooks', k, None)
497
498
498 cs = c.db_repo_scm_instance.get_changeset(revision)
499 cs = c.db_repo_scm_instance.get_changeset(revision)
499 content_type = settings.ARCHIVE_SPECS[fileformat][0]
500 content_type = settings.ARCHIVE_SPECS[fileformat][0]
500 except ChangesetDoesNotExistError:
501 except ChangesetDoesNotExistError:
501 return _('Unknown revision %s') % revision
502 return _('Unknown revision %s') % revision
502 except EmptyRepositoryError:
503 except EmptyRepositoryError:
503 return _('Empty repository')
504 return _('Empty repository')
504 except (ImproperArchiveTypeError, KeyError):
505 except (ImproperArchiveTypeError, KeyError):
505 return _('Unknown archive type')
506 return _('Unknown archive type')
 
-        from kallithea import CONFIG
         rev_name = cs.raw_id[:12]
         archive_name = '%s-%s%s' % (repo_name.replace('/', '_'), rev_name, ext)
 
         archive_path = None
         cached_archive_path = None
-        archive_cache_dir = CONFIG.get('archive_cache_dir')
+        archive_cache_dir = kallithea.CONFIG.get('archive_cache_dir')
         if archive_cache_dir and not subrepos: # TODO: subrepo caching?
515 if not os.path.isdir(archive_cache_dir):
515 if not os.path.isdir(archive_cache_dir):
516 os.makedirs(archive_cache_dir)
516 os.makedirs(archive_cache_dir)
517 cached_archive_path = os.path.join(archive_cache_dir, archive_name)
517 cached_archive_path = os.path.join(archive_cache_dir, archive_name)
518 if os.path.isfile(cached_archive_path):
518 if os.path.isfile(cached_archive_path):
519 log.debug('Found cached archive in %s', cached_archive_path)
519 log.debug('Found cached archive in %s', cached_archive_path)
520 archive_path = cached_archive_path
520 archive_path = cached_archive_path
521 else:
521 else:
522 log.debug('Archive %s is not yet cached', archive_name)
522 log.debug('Archive %s is not yet cached', archive_name)
523
523
524 if archive_path is None:
524 if archive_path is None:
525 # generate new archive
525 # generate new archive
526 fd, archive_path = tempfile.mkstemp()
526 fd, archive_path = tempfile.mkstemp()
527 log.debug('Creating new temp archive in %s', archive_path)
527 log.debug('Creating new temp archive in %s', archive_path)
528 with os.fdopen(fd, 'wb') as stream:
528 with os.fdopen(fd, 'wb') as stream:
529 cs.fill_archive(stream=stream, kind=fileformat, subrepos=subrepos)
529 cs.fill_archive(stream=stream, kind=fileformat, subrepos=subrepos)
530 # stream (and thus fd) has been closed by cs.fill_archive
530 # stream (and thus fd) has been closed by cs.fill_archive
531 if cached_archive_path is not None:
531 if cached_archive_path is not None:
532 # we generated the archive - move it to cache
532 # we generated the archive - move it to cache
533 log.debug('Storing new archive in %s', cached_archive_path)
533 log.debug('Storing new archive in %s', cached_archive_path)
534 shutil.move(archive_path, cached_archive_path)
534 shutil.move(archive_path, cached_archive_path)
535 archive_path = cached_archive_path
535 archive_path = cached_archive_path
536
536
537 def get_chunked_archive(archive_path):
537 def get_chunked_archive(archive_path):
538 stream = open(archive_path, 'rb')
538 stream = open(archive_path, 'rb')
539 while True:
539 while True:
540 data = stream.read(16 * 1024)
540 data = stream.read(16 * 1024)
541 if not data:
541 if not data:
542 break
542 break
543 yield data
543 yield data
544 stream.close()
544 stream.close()
545 if archive_path != cached_archive_path:
545 if archive_path != cached_archive_path:
546 log.debug('Destroying temp archive %s', archive_path)
546 log.debug('Destroying temp archive %s', archive_path)
547 os.remove(archive_path)
547 os.remove(archive_path)
548
548
549 action_logger(user=request.authuser,
549 action_logger(user=request.authuser,
550 action='user_downloaded_archive:%s' % (archive_name),
550 action='user_downloaded_archive:%s' % (archive_name),
551 repo=repo_name, ipaddr=request.ip_addr, commit=True)
551 repo=repo_name, ipaddr=request.ip_addr, commit=True)
552
552
553 response.content_disposition = str('attachment; filename=%s' % (archive_name))
553 response.content_disposition = str('attachment; filename=%s' % (archive_name))
554 response.content_type = str(content_type)
554 response.content_type = str(content_type)
555 return get_chunked_archive(archive_path)
555 return get_chunked_archive(archive_path)
556
556
557 @LoginRequired(allow_default_user=True)
557 @LoginRequired(allow_default_user=True)
558 @HasRepoPermissionLevelDecorator('read')
558 @HasRepoPermissionLevelDecorator('read')
559 def diff(self, repo_name, f_path):
559 def diff(self, repo_name, f_path):
560 ignore_whitespace_diff = h.get_ignore_whitespace_diff(request.GET)
560 ignore_whitespace_diff = h.get_ignore_whitespace_diff(request.GET)
561 diff_context_size = h.get_diff_context_size(request.GET)
561 diff_context_size = h.get_diff_context_size(request.GET)
562 diff2 = request.GET.get('diff2', '')
562 diff2 = request.GET.get('diff2', '')
563 diff1 = request.GET.get('diff1', '') or diff2
563 diff1 = request.GET.get('diff1', '') or diff2
564 c.action = request.GET.get('diff')
564 c.action = request.GET.get('diff')
565 c.no_changes = diff1 == diff2
565 c.no_changes = diff1 == diff2
566 c.f_path = f_path
566 c.f_path = f_path
567 c.big_diff = False
567 c.big_diff = False
568 fulldiff = request.GET.get('fulldiff')
568 fulldiff = request.GET.get('fulldiff')
569 c.changes = OrderedDict()
569 c.changes = OrderedDict()
570 c.changes[diff2] = []
570 c.changes[diff2] = []
571
571
572 # special case if we want a show rev only, it's impl here
572 # special case if we want a show rev only, it's impl here
573 # to reduce JS and callbacks
573 # to reduce JS and callbacks
574
574
575 if request.GET.get('show_rev'):
575 if request.GET.get('show_rev'):
576 if asbool(request.GET.get('annotate', 'False')):
576 if asbool(request.GET.get('annotate', 'False')):
577 _url = url('files_annotate_home', repo_name=c.repo_name,
577 _url = url('files_annotate_home', repo_name=c.repo_name,
578 revision=diff1, f_path=c.f_path)
578 revision=diff1, f_path=c.f_path)
579 else:
579 else:
580 _url = url('files_home', repo_name=c.repo_name,
580 _url = url('files_home', repo_name=c.repo_name,
581 revision=diff1, f_path=c.f_path)
581 revision=diff1, f_path=c.f_path)
582
582
583 raise HTTPFound(location=_url)
583 raise HTTPFound(location=_url)
584 try:
584 try:
585 if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
585 if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
586 c.changeset_1 = c.db_repo_scm_instance.get_changeset(diff1)
586 c.changeset_1 = c.db_repo_scm_instance.get_changeset(diff1)
587 try:
587 try:
588 node1 = c.changeset_1.get_node(f_path)
588 node1 = c.changeset_1.get_node(f_path)
589 if node1.is_dir():
589 if node1.is_dir():
590 raise NodeError('%s path is a %s not a file'
590 raise NodeError('%s path is a %s not a file'
591 % (node1, type(node1)))
591 % (node1, type(node1)))
592 except NodeDoesNotExistError:
592 except NodeDoesNotExistError:
593 c.changeset_1 = EmptyChangeset(cs=diff1,
593 c.changeset_1 = EmptyChangeset(cs=diff1,
594 revision=c.changeset_1.revision,
594 revision=c.changeset_1.revision,
595 repo=c.db_repo_scm_instance)
595 repo=c.db_repo_scm_instance)
596 node1 = FileNode(f_path, '', changeset=c.changeset_1)
596 node1 = FileNode(f_path, '', changeset=c.changeset_1)
597 else:
597 else:
598 c.changeset_1 = EmptyChangeset(repo=c.db_repo_scm_instance)
598 c.changeset_1 = EmptyChangeset(repo=c.db_repo_scm_instance)
599 node1 = FileNode(f_path, '', changeset=c.changeset_1)
599 node1 = FileNode(f_path, '', changeset=c.changeset_1)
600
600
601 if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
601 if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
602 c.changeset_2 = c.db_repo_scm_instance.get_changeset(diff2)
602 c.changeset_2 = c.db_repo_scm_instance.get_changeset(diff2)
603 try:
603 try:
604 node2 = c.changeset_2.get_node(f_path)
604 node2 = c.changeset_2.get_node(f_path)
605 if node2.is_dir():
605 if node2.is_dir():
606 raise NodeError('%s path is a %s not a file'
606 raise NodeError('%s path is a %s not a file'
607 % (node2, type(node2)))
607 % (node2, type(node2)))
608 except NodeDoesNotExistError:
608 except NodeDoesNotExistError:
609 c.changeset_2 = EmptyChangeset(cs=diff2,
609 c.changeset_2 = EmptyChangeset(cs=diff2,
610 revision=c.changeset_2.revision,
610 revision=c.changeset_2.revision,
611 repo=c.db_repo_scm_instance)
611 repo=c.db_repo_scm_instance)
612 node2 = FileNode(f_path, '', changeset=c.changeset_2)
612 node2 = FileNode(f_path, '', changeset=c.changeset_2)
613 else:
613 else:
614 c.changeset_2 = EmptyChangeset(repo=c.db_repo_scm_instance)
614 c.changeset_2 = EmptyChangeset(repo=c.db_repo_scm_instance)
615 node2 = FileNode(f_path, '', changeset=c.changeset_2)
615 node2 = FileNode(f_path, '', changeset=c.changeset_2)
616 except (RepositoryError, NodeError):
616 except (RepositoryError, NodeError):
617 log.error(traceback.format_exc())
617 log.error(traceback.format_exc())
618 raise HTTPFound(location=url('files_home', repo_name=c.repo_name,
618 raise HTTPFound(location=url('files_home', repo_name=c.repo_name,
619 f_path=f_path))
619 f_path=f_path))
620
620
621 if c.action == 'download':
621 if c.action == 'download':
622 raw_diff = diffs.get_gitdiff(node1, node2,
622 raw_diff = diffs.get_gitdiff(node1, node2,
623 ignore_whitespace=ignore_whitespace_diff,
623 ignore_whitespace=ignore_whitespace_diff,
624 context=diff_context_size)
624 context=diff_context_size)
625 diff_name = '%s_vs_%s.diff' % (diff1, diff2)
625 diff_name = '%s_vs_%s.diff' % (diff1, diff2)
626 response.content_type = 'text/plain'
626 response.content_type = 'text/plain'
627 response.content_disposition = (
627 response.content_disposition = (
628 'attachment; filename=%s' % diff_name
628 'attachment; filename=%s' % diff_name
629 )
629 )
630 return raw_diff
630 return raw_diff
631
631
632 elif c.action == 'raw':
632 elif c.action == 'raw':
633 raw_diff = diffs.get_gitdiff(node1, node2,
633 raw_diff = diffs.get_gitdiff(node1, node2,
634 ignore_whitespace=ignore_whitespace_diff,
634 ignore_whitespace=ignore_whitespace_diff,
635 context=diff_context_size)
635 context=diff_context_size)
636 response.content_type = 'text/plain'
636 response.content_type = 'text/plain'
637 return raw_diff
637 return raw_diff
638
638
639 else:
639 else:
640 fid = h.FID(diff2, node2.path)
640 fid = h.FID(diff2, node2.path)
641 diff_limit = None if fulldiff else self.cut_off_limit
641 diff_limit = None if fulldiff else self.cut_off_limit
642 c.a_rev, c.cs_rev, a_path, diff, st, op = diffs.wrapped_diff(filenode_old=node1,
642 c.a_rev, c.cs_rev, a_path, diff, st, op = diffs.wrapped_diff(filenode_old=node1,
643 filenode_new=node2,
643 filenode_new=node2,
644 diff_limit=diff_limit,
644 diff_limit=diff_limit,
645 ignore_whitespace=ignore_whitespace_diff,
645 ignore_whitespace=ignore_whitespace_diff,
646 line_context=diff_context_size,
646 line_context=diff_context_size,
647 enable_comments=False)
647 enable_comments=False)
648 c.file_diff_data = [(fid, fid, op, a_path, node2.path, diff, st)]
648 c.file_diff_data = [(fid, fid, op, a_path, node2.path, diff, st)]
649 return render('files/file_diff.html')
649 return render('files/file_diff.html')
650
650
651 @LoginRequired(allow_default_user=True)
651 @LoginRequired(allow_default_user=True)
652 @HasRepoPermissionLevelDecorator('read')
652 @HasRepoPermissionLevelDecorator('read')
653 def diff_2way(self, repo_name, f_path):
653 def diff_2way(self, repo_name, f_path):
654 diff1 = request.GET.get('diff1', '')
654 diff1 = request.GET.get('diff1', '')
655 diff2 = request.GET.get('diff2', '')
655 diff2 = request.GET.get('diff2', '')
656 try:
656 try:
657 if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
657 if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
658 c.changeset_1 = c.db_repo_scm_instance.get_changeset(diff1)
658 c.changeset_1 = c.db_repo_scm_instance.get_changeset(diff1)
659 try:
659 try:
660 node1 = c.changeset_1.get_node(f_path)
660 node1 = c.changeset_1.get_node(f_path)
661 if node1.is_dir():
661 if node1.is_dir():
662 raise NodeError('%s path is a %s not a file'
662 raise NodeError('%s path is a %s not a file'
663 % (node1, type(node1)))
663 % (node1, type(node1)))
664 except NodeDoesNotExistError:
664 except NodeDoesNotExistError:
665 c.changeset_1 = EmptyChangeset(cs=diff1,
665 c.changeset_1 = EmptyChangeset(cs=diff1,
666 revision=c.changeset_1.revision,
666 revision=c.changeset_1.revision,
667 repo=c.db_repo_scm_instance)
667 repo=c.db_repo_scm_instance)
668 node1 = FileNode(f_path, '', changeset=c.changeset_1)
668 node1 = FileNode(f_path, '', changeset=c.changeset_1)
669 else:
669 else:
670 c.changeset_1 = EmptyChangeset(repo=c.db_repo_scm_instance)
670 c.changeset_1 = EmptyChangeset(repo=c.db_repo_scm_instance)
671 node1 = FileNode(f_path, '', changeset=c.changeset_1)
671 node1 = FileNode(f_path, '', changeset=c.changeset_1)
672
672
673 if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
673 if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
674 c.changeset_2 = c.db_repo_scm_instance.get_changeset(diff2)
674 c.changeset_2 = c.db_repo_scm_instance.get_changeset(diff2)
675 try:
675 try:
676 node2 = c.changeset_2.get_node(f_path)
676 node2 = c.changeset_2.get_node(f_path)
677 if node2.is_dir():
677 if node2.is_dir():
678 raise NodeError('%s path is a %s not a file'
678 raise NodeError('%s path is a %s not a file'
679 % (node2, type(node2)))
679 % (node2, type(node2)))
680 except NodeDoesNotExistError:
680 except NodeDoesNotExistError:
681 c.changeset_2 = EmptyChangeset(cs=diff2,
681 c.changeset_2 = EmptyChangeset(cs=diff2,
682 revision=c.changeset_2.revision,
682 revision=c.changeset_2.revision,
683 repo=c.db_repo_scm_instance)
683 repo=c.db_repo_scm_instance)
684 node2 = FileNode(f_path, '', changeset=c.changeset_2)
684 node2 = FileNode(f_path, '', changeset=c.changeset_2)
685 else:
685 else:
686 c.changeset_2 = EmptyChangeset(repo=c.db_repo_scm_instance)
686 c.changeset_2 = EmptyChangeset(repo=c.db_repo_scm_instance)
687 node2 = FileNode(f_path, '', changeset=c.changeset_2)
687 node2 = FileNode(f_path, '', changeset=c.changeset_2)
688 except ChangesetDoesNotExistError as e:
688 except ChangesetDoesNotExistError as e:
689 msg = _('Such revision does not exist for this repository')
689 msg = _('Such revision does not exist for this repository')
690 h.flash(msg, category='error')
690 h.flash(msg, category='error')
691 raise HTTPNotFound()
691 raise HTTPNotFound()
692 c.node1 = node1
692 c.node1 = node1
693 c.node2 = node2
693 c.node2 = node2
694 c.cs1 = c.changeset_1
694 c.cs1 = c.changeset_1
695 c.cs2 = c.changeset_2
695 c.cs2 = c.changeset_2
696
696
697 return render('files/diff_2way.html')
697 return render('files/diff_2way.html')
698
698
699 def _get_node_history(self, cs, f_path, changesets=None):
699 def _get_node_history(self, cs, f_path, changesets=None):
700 """
700 """
701 get changesets history for given node
701 get changesets history for given node
702
702
703 :param cs: changeset to calculate history
703 :param cs: changeset to calculate history
704 :param f_path: path for node to calculate history for
704 :param f_path: path for node to calculate history for
705 :param changesets: if passed don't calculate history and take
705 :param changesets: if passed don't calculate history and take
706 changesets defined in this list
706 changesets defined in this list
707 """
707 """
708 # calculate history based on tip
708 # calculate history based on tip
709 tip_cs = c.db_repo_scm_instance.get_changeset()
709 tip_cs = c.db_repo_scm_instance.get_changeset()
710 if changesets is None:
710 if changesets is None:
711 try:
711 try:
712 changesets = tip_cs.get_file_history(f_path)
712 changesets = tip_cs.get_file_history(f_path)
713 except (NodeDoesNotExistError, ChangesetError):
713 except (NodeDoesNotExistError, ChangesetError):
714 # this node is not present at tip !
714 # this node is not present at tip !
715 changesets = cs.get_file_history(f_path)
715 changesets = cs.get_file_history(f_path)
716 hist_l = []
716 hist_l = []
717
717
718 changesets_group = ([], _("Changesets"))
718 changesets_group = ([], _("Changesets"))
719 branches_group = ([], _("Branches"))
719 branches_group = ([], _("Branches"))
720 tags_group = ([], _("Tags"))
720 tags_group = ([], _("Tags"))
721 for chs in changesets:
721 for chs in changesets:
722 # TODO: loop over chs.branches ... but that will not give all the bogus None branches for Git ...
722 # TODO: loop over chs.branches ... but that will not give all the bogus None branches for Git ...
723 _branch = chs.branch
723 _branch = chs.branch
724 n_desc = '%s (%s)' % (h.show_id(chs), _branch)
724 n_desc = '%s (%s)' % (h.show_id(chs), _branch)
725 changesets_group[0].append((chs.raw_id, n_desc,))
725 changesets_group[0].append((chs.raw_id, n_desc,))
726 hist_l.append(changesets_group)
726 hist_l.append(changesets_group)
727
727
728 for name, chs in c.db_repo_scm_instance.branches.items():
728 for name, chs in c.db_repo_scm_instance.branches.items():
729 branches_group[0].append((chs, name),)
729 branches_group[0].append((chs, name),)
730 hist_l.append(branches_group)
730 hist_l.append(branches_group)
731
731
732 for name, chs in c.db_repo_scm_instance.tags.items():
732 for name, chs in c.db_repo_scm_instance.tags.items():
733 tags_group[0].append((chs, name),)
733 tags_group[0].append((chs, name),)
734 hist_l.append(tags_group)
734 hist_l.append(tags_group)
735
735
736 return hist_l, changesets
736 return hist_l, changesets
737
737
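For illustration only, a rough sketch of how the grouped history returned above can be consumed (cs and f_path are whatever the caller already has); the nested (options, label) shape is the same one h.select() accepts for optgroups:

    hist, changesets = self._get_node_history(cs, f_path)
    for options, group_label in hist:     # three groups: Changesets, Branches, Tags
        for value, label in options:      # (raw_id or changeset, display text) pairs
            pass                          # e.g. feed the whole 'hist' list into h.select()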
738 @LoginRequired(allow_default_user=True)
738 @LoginRequired(allow_default_user=True)
739 @HasRepoPermissionLevelDecorator('read')
739 @HasRepoPermissionLevelDecorator('read')
740 @jsonify
740 @jsonify
741 def nodelist(self, repo_name, revision, f_path):
741 def nodelist(self, repo_name, revision, f_path):
742 if request.environ.get('HTTP_X_PARTIAL_XHR'):
742 if request.environ.get('HTTP_X_PARTIAL_XHR'):
743 cs = self.__get_cs(revision)
743 cs = self.__get_cs(revision)
744 _d, _f = ScmModel().get_nodes(repo_name, cs.raw_id, f_path,
744 _d, _f = ScmModel().get_nodes(repo_name, cs.raw_id, f_path,
745 flat=False)
745 flat=False)
746 return {'nodes': _d + _f}
746 return {'nodes': _d + _f}
@@ -1,1392 +1,1389 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 Helper functions
15 Helper functions
16
16
17 Consists of functions typically used within templates, but also
17 Consists of functions typically used within templates, but also
18 available to Controllers. This module is available to both as 'h'.
18 available to Controllers. This module is available to both as 'h'.
19 """
19 """
20 import hashlib
20 import hashlib
21 import json
21 import json
22 import logging
22 import logging
23 import random
23 import random
24 import re
24 import re
25 import textwrap
25 import textwrap
26 import urllib.parse
26 import urllib.parse
27
27
28 from beaker.cache import cache_region
28 from beaker.cache import cache_region
29 from pygments import highlight as code_highlight
29 from pygments import highlight as code_highlight
30 from pygments.formatters.html import HtmlFormatter
30 from pygments.formatters.html import HtmlFormatter
31 from tg.i18n import ugettext as _
31 from tg.i18n import ugettext as _
32 from webhelpers2.html import HTML, escape, literal
32 from webhelpers2.html import HTML, escape, literal
33 from webhelpers2.html.tags import NotGiven, Option, Options, _input, _make_safe_id_component, checkbox, end_form
33 from webhelpers2.html.tags import NotGiven, Option, Options, _input, _make_safe_id_component, checkbox, end_form
34 from webhelpers2.html.tags import form as insecure_form
34 from webhelpers2.html.tags import form as insecure_form
35 from webhelpers2.html.tags import hidden, link_to, password, radio
35 from webhelpers2.html.tags import hidden, link_to, password, radio
36 from webhelpers2.html.tags import select as webhelpers2_select
36 from webhelpers2.html.tags import select as webhelpers2_select
37 from webhelpers2.html.tags import submit, text, textarea
37 from webhelpers2.html.tags import submit, text, textarea
38 from webhelpers2.number import format_byte_size
38 from webhelpers2.number import format_byte_size
39 from webhelpers2.text import chop_at, truncate, wrap_paragraphs
39 from webhelpers2.text import chop_at, truncate, wrap_paragraphs
40
40
41 import kallithea
41 from kallithea.config.routing import url
42 from kallithea.config.routing import url
42 from kallithea.lib.annotate import annotate_highlight
43 from kallithea.lib.annotate import annotate_highlight
43 #==============================================================================
44 #==============================================================================
44 # PERMS
45 # PERMS
45 #==============================================================================
46 #==============================================================================
46 from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel
47 from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel
47 from kallithea.lib.markup_renderer import url_re
48 from kallithea.lib.markup_renderer import url_re
48 from kallithea.lib.pygmentsutils import get_custom_lexer
49 from kallithea.lib.pygmentsutils import get_custom_lexer
49 from kallithea.lib.utils2 import MENTIONS_REGEX, AttributeDict
50 from kallithea.lib.utils2 import MENTIONS_REGEX, AttributeDict
50 from kallithea.lib.utils2 import age as _age
51 from kallithea.lib.utils2 import age as _age
51 from kallithea.lib.utils2 import asbool, credentials_filter, safe_bytes, safe_int, safe_str, time_to_datetime
52 from kallithea.lib.utils2 import asbool, credentials_filter, safe_bytes, safe_int, safe_str, time_to_datetime
52 from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
53 from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
53 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError
54 from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError
54 #==============================================================================
55 #==============================================================================
55 # SCM FILTERS available via h.
56 # SCM FILTERS available via h.
56 #==============================================================================
57 #==============================================================================
57 from kallithea.lib.vcs.utils import author_email, author_name
58 from kallithea.lib.vcs.utils import author_email, author_name
58
59
59
60
60 # mute pyflakes "imported but unused"
61 # mute pyflakes "imported but unused"
61 assert Option
62 assert Option
62 assert checkbox
63 assert checkbox
63 assert end_form
64 assert end_form
64 assert password
65 assert password
65 assert radio
66 assert radio
66 assert submit
67 assert submit
67 assert text
68 assert text
68 assert textarea
69 assert textarea
69 assert format_byte_size
70 assert format_byte_size
70 assert chop_at
71 assert chop_at
71 assert wrap_paragraphs
72 assert wrap_paragraphs
72 assert HasPermissionAny
73 assert HasPermissionAny
73 assert HasRepoGroupPermissionLevel
74 assert HasRepoGroupPermissionLevel
74 assert HasRepoPermissionLevel
75 assert HasRepoPermissionLevel
75 assert time_to_datetime
76 assert time_to_datetime
76 assert EmptyChangeset
77 assert EmptyChangeset
77
78
78
79
79 log = logging.getLogger(__name__)
80 log = logging.getLogger(__name__)
80
81
81
82
82 def canonical_url(*args, **kargs):
83 def canonical_url(*args, **kargs):
83 '''Like url(x, qualified=True), but returns a URL that is not only qualified
84 '''Like url(x, qualified=True), but returns a URL that is not only qualified
84 but also canonical, as configured by the canonical_url setting'''
85 but also canonical, as configured by the canonical_url setting'''
85 from kallithea import CONFIG
86 try:
86 try:
87 parts = CONFIG.get('canonical_url', '').split('://', 1)
87 parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1)
88 kargs['host'] = parts[1]
88 kargs['host'] = parts[1]
89 kargs['protocol'] = parts[0]
89 kargs['protocol'] = parts[0]
90 except IndexError:
90 except IndexError:
91 kargs['qualified'] = True
91 kargs['qualified'] = True
92 return url(*args, **kargs)
92 return url(*args, **kargs)
93
93
94
94
95 def canonical_hostname():
95 def canonical_hostname():
96 '''Return canonical hostname of system'''
96 '''Return canonical hostname of system'''
97 from kallithea import CONFIG
98 try:
97 try:
99 parts = CONFIG.get('canonical_url', '').split('://', 1)
98 parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1)
100 return parts[1].split('/', 1)[0]
99 return parts[1].split('/', 1)[0]
101 except IndexError:
100 except IndexError:
102 parts = url('home', qualified=True).split('://', 1)
101 parts = url('home', qualified=True).split('://', 1)
103 return parts[1].split('/', 1)[0]
102 return parts[1].split('/', 1)[0]
104
103
105
104
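For illustration, a minimal sketch of the parsing both helpers above perform, assuming a made-up canonical_url value in the .ini; the setting is read through kallithea.CONFIG at call time:

    import kallithea

    # assuming e.g.: canonical_url = https://kallithea.example.com/prefix
    parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1)
    protocol, host = parts[0], parts[1]   # 'https', 'kallithea.example.com/prefix'
    hostname = parts[1].split('/', 1)[0]  # 'kallithea.example.com'
    # if the setting is unset, parts[1] raises IndexError and the helpers fall back
    # to url(..., qualified=True) or the host of the 'home' URL instead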
106 def html_escape(s):
105 def html_escape(s):
107 """Return string with all html escaped.
106 """Return string with all html escaped.
108 This is also safe for javascript in html but not necessarily correct.
107 This is also safe for javascript in html but not necessarily correct.
109 """
108 """
110 return (s
109 return (s
111 .replace('&', '&amp;')
110 .replace('&', '&amp;')
112 .replace(">", "&gt;")
111 .replace(">", "&gt;")
113 .replace("<", "&lt;")
112 .replace("<", "&lt;")
114 .replace('"', "&quot;")
113 .replace('"', "&quot;")
115 .replace("'", "&apos;") # Note: this is HTML5 not HTML4 and might not work in mails
114 .replace("'", "&apos;") # Note: this is HTML5 not HTML4 and might not work in mails
116 )
115 )
117
116
118 def js(value):
117 def js(value):
119 """Convert Python value to the corresponding JavaScript representation.
118 """Convert Python value to the corresponding JavaScript representation.
120
119
121 This is necessary to safely insert arbitrary values into HTML <script>
120 This is necessary to safely insert arbitrary values into HTML <script>
122 sections e.g. using Mako template expression substitution.
121 sections e.g. using Mako template expression substitution.
123
122
124 Note: Rather than using this function, it's preferable to avoid the
123 Note: Rather than using this function, it's preferable to avoid the
125 insertion of values into HTML <script> sections altogether. Instead,
124 insertion of values into HTML <script> sections altogether. Instead,
126 data should (to the extent possible) be passed to JavaScript using
125 data should (to the extent possible) be passed to JavaScript using
127 data attributes or AJAX calls, eliminating the need for JS specific
126 data attributes or AJAX calls, eliminating the need for JS specific
128 escaping.
127 escaping.
129
128
130 Note: This is not safe for use in attributes (e.g. onclick), because
129 Note: This is not safe for use in attributes (e.g. onclick), because
131 quotes are not escaped.
130 quotes are not escaped.
132
131
133 Because the rules for parsing <script> varies between XHTML (where
132 Because the rules for parsing <script> varies between XHTML (where
134 normal rules apply for any special characters) and HTML (where
133 normal rules apply for any special characters) and HTML (where
135 entities are not interpreted, but the literal string "</script>"
134 entities are not interpreted, but the literal string "</script>"
136 is forbidden), the function ensures that the result never contains
135 is forbidden), the function ensures that the result never contains
137 '&', '<' and '>', thus making it safe in both those contexts (but
136 '&', '<' and '>', thus making it safe in both those contexts (but
138 not in attributes).
137 not in attributes).
139 """
138 """
140 return literal(
139 return literal(
141 ('(' + json.dumps(value) + ')')
140 ('(' + json.dumps(value) + ')')
142 # In JSON, the following can only appear in string literals.
141 # In JSON, the following can only appear in string literals.
143 .replace('&', r'\x26')
142 .replace('&', r'\x26')
144 .replace('<', r'\x3c')
143 .replace('<', r'\x3c')
145 .replace('>', r'\x3e')
144 .replace('>', r'\x3e')
146 )
145 )
147
146
148
147
149 def jshtml(val):
148 def jshtml(val):
150 """HTML escapes a string value, then converts the resulting string
149 """HTML escapes a string value, then converts the resulting string
151 to its corresponding JavaScript representation (see `js`).
150 to its corresponding JavaScript representation (see `js`).
152
151
153 This is used when a plain-text string (possibly containing special
152 This is used when a plain-text string (possibly containing special
154 HTML characters) will be used by a script in an HTML context (e.g.
153 HTML characters) will be used by a script in an HTML context (e.g.
155 element.innerHTML or jQuery's 'html' method).
154 element.innerHTML or jQuery's 'html' method).
156
155
157 If in doubt, err on the side of using `jshtml` over `js`, since it's
156 If in doubt, err on the side of using `jshtml` over `js`, since it's
158 better to escape too much than too little.
157 better to escape too much than too little.
159 """
158 """
160 return js(escape(val))
159 return js(escape(val))
161
160
162
161
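For illustration, what the two escaping helpers above produce for a made-up input:

    from kallithea.lib import helpers as h

    h.js('</script> & "x"')
    # -> literal('("\x3c/script\x3e \x26 \"x\"")'); '<', '>' and '&' never appear
    #    literally, so the value is safe inside a <script> block (but not in attributes)
    h.jshtml('<b>bold</b>')
    # -> HTML-escaped first, then JS-encoded, so it is also safe for element.innerHTML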
163 def shorter(s, size=20, firstline=False, postfix='...'):
162 def shorter(s, size=20, firstline=False, postfix='...'):
164 """Truncate s to size, including the postfix string if truncating.
163 """Truncate s to size, including the postfix string if truncating.
165 If firstline, truncate at newline.
164 If firstline, truncate at newline.
166 """
165 """
167 if firstline:
166 if firstline:
168 s = s.split('\n', 1)[0].rstrip()
167 s = s.split('\n', 1)[0].rstrip()
169 if len(s) > size:
168 if len(s) > size:
170 return s[:size - len(postfix)] + postfix
169 return s[:size - len(postfix)] + postfix
171 return s
170 return s
172
171
173
172
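A few illustrative calls of the truncation helper above (inputs made up):

    h.shorter('abcdefghijklmnopqrstuvwxyz', size=10)   # -> 'abcdefg...' (10 chars incl. '...')
    h.shorter('short line', size=20)                   # -> 'short line' (unchanged)
    h.shorter('first\nsecond', firstline=True)         # -> 'first' (cut at the newline)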
174 def reset(name, value, id=NotGiven, **attrs):
173 def reset(name, value, id=NotGiven, **attrs):
175 """Create a reset button, similar to webhelpers2.html.tags.submit ."""
174 """Create a reset button, similar to webhelpers2.html.tags.submit ."""
176 return _input("reset", name, value, id, attrs)
175 return _input("reset", name, value, id, attrs)
177
176
178
177
179 def select(name, selected_values, options, id=NotGiven, **attrs):
178 def select(name, selected_values, options, id=NotGiven, **attrs):
180 """Convenient wrapper of webhelpers2 to let it accept options as a tuple list"""
179 """Convenient wrapper of webhelpers2 to let it accept options as a tuple list"""
181 if isinstance(options, list):
180 if isinstance(options, list):
182 option_list = options
181 option_list = options
183 # Handle old value,label lists ... where value also can be value,label lists
182 # Handle old value,label lists ... where value also can be value,label lists
184 options = Options()
183 options = Options()
185 for x in option_list:
184 for x in option_list:
186 if isinstance(x, tuple) and len(x) == 2:
185 if isinstance(x, tuple) and len(x) == 2:
187 value, label = x
186 value, label = x
188 elif isinstance(x, str):
187 elif isinstance(x, str):
189 value = label = x
188 value = label = x
190 else:
189 else:
191 log.error('invalid select option %r', x)
190 log.error('invalid select option %r', x)
192 raise
191 raise
193 if isinstance(value, list):
192 if isinstance(value, list):
194 og = options.add_optgroup(label)
193 og = options.add_optgroup(label)
195 for x in value:
194 for x in value:
196 if isinstance(x, tuple) and len(x) == 2:
195 if isinstance(x, tuple) and len(x) == 2:
197 group_value, group_label = x
196 group_value, group_label = x
198 elif isinstance(x, str):
197 elif isinstance(x, str):
199 group_value = group_label = x
198 group_value = group_label = x
200 else:
199 else:
201 log.error('invalid select option %r', x)
200 log.error('invalid select option %r', x)
202 raise
201 raise
203 og.add_option(group_label, group_value)
202 og.add_option(group_label, group_value)
204 else:
203 else:
205 options.add_option(label, value)
204 options.add_option(label, value)
206 return webhelpers2_select(name, selected_values, options, id=id, **attrs)
205 return webhelpers2_select(name, selected_values, options, id=id, **attrs)
207
206
208
207
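An illustrative call of the select() wrapper above, with made-up option data; plain (value, label) tuples become options and a nested list becomes an optgroup:

    h.select('vcs', 'hg',
             [('hg', 'Mercurial'), ('git', 'Git'),              # plain options
              ([('stable', 'Stable'), ('dev', 'Development')],  # nested list of options ...
               'Branches')])                                     # ... grouped under 'Branches'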
209 safeid = _make_safe_id_component
208 safeid = _make_safe_id_component
210
209
211
210
212 def FID(raw_id, path):
211 def FID(raw_id, path):
213 """
212 """
214 Creates a unique ID for a filenode, based on its revision and a hash of its path;
213 Creates a unique ID for a filenode, based on its revision and a hash of its path;
215 it is safe to use in URLs
214 it is safe to use in URLs
216 """
215 """
217 return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12])
216 return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12])
218
217
219
218
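An illustrative call of FID() above, with made-up values; the result combines a short revision id with a hash of the path, so it is stable and safe in URLs and anchors:

    h.FID('deadbeefcafe0123456789abcdef', 'docs/index.rst')
    # -> 'C-deadbeefcafe-' followed by the first 12 hex chars of md5('docs/index.rst')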
220 def get_ignore_whitespace_diff(GET):
219 def get_ignore_whitespace_diff(GET):
221 """Return true if URL requested whitespace to be ignored"""
220 """Return true if URL requested whitespace to be ignored"""
222 return bool(GET.get('ignorews'))
221 return bool(GET.get('ignorews'))
223
222
224
223
225 def ignore_whitespace_link(GET, anchor=None):
224 def ignore_whitespace_link(GET, anchor=None):
226 """Return snippet with link to current URL with whitespace ignoring toggled"""
225 """Return snippet with link to current URL with whitespace ignoring toggled"""
227 params = dict(GET) # ignoring duplicates
226 params = dict(GET) # ignoring duplicates
228 if get_ignore_whitespace_diff(GET):
227 if get_ignore_whitespace_diff(GET):
229 params.pop('ignorews')
228 params.pop('ignorews')
230 title = _("Show whitespace changes")
229 title = _("Show whitespace changes")
231 else:
230 else:
232 params['ignorews'] = '1'
231 params['ignorews'] = '1'
233 title = _("Ignore whitespace changes")
232 title = _("Ignore whitespace changes")
234 params['anchor'] = anchor
233 params['anchor'] = anchor
235 return link_to(
234 return link_to(
236 literal('<i class="icon-strike"></i>'),
235 literal('<i class="icon-strike"></i>'),
237 url.current(**params),
236 url.current(**params),
238 title=title,
237 title=title,
239 **{'data-toggle': 'tooltip'})
238 **{'data-toggle': 'tooltip'})
240
239
241
240
242 def get_diff_context_size(GET):
241 def get_diff_context_size(GET):
243 """Return effective context size requested in URL"""
242 """Return effective context size requested in URL"""
244 return safe_int(GET.get('context'), default=3)
243 return safe_int(GET.get('context'), default=3)
245
244
246
245
247 def increase_context_link(GET, anchor=None):
246 def increase_context_link(GET, anchor=None):
248 """Return snippet with link to current URL with double context size"""
247 """Return snippet with link to current URL with double context size"""
249 context = get_diff_context_size(GET) * 2
248 context = get_diff_context_size(GET) * 2
250 params = dict(GET) # ignoring duplicates
249 params = dict(GET) # ignoring duplicates
251 params['context'] = str(context)
250 params['context'] = str(context)
252 params['anchor'] = anchor
251 params['anchor'] = anchor
253 return link_to(
252 return link_to(
254 literal('<i class="icon-sort"></i>'),
253 literal('<i class="icon-sort"></i>'),
255 url.current(**params),
254 url.current(**params),
256 title=_('Increase diff context to %(num)s lines') % {'num': context},
255 title=_('Increase diff context to %(num)s lines') % {'num': context},
257 **{'data-toggle': 'tooltip'})
256 **{'data-toggle': 'tooltip'})
258
257
259
258
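For illustration, how the diff-option helpers above behave for a hypothetical request carrying ?context=6&ignorews=1:

    h.get_diff_context_size(request.GET)        # -> 6 (falls back to 3 when absent or invalid)
    h.get_ignore_whitespace_diff(request.GET)   # -> True
    # increase_context_link() then links to the same URL with context=12, and
    # ignore_whitespace_link() to the same URL with ignorews removed ('Show whitespace changes')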
260 class _FilesBreadCrumbs(object):
259 class _FilesBreadCrumbs(object):
261
260
262 def __call__(self, repo_name, rev, paths):
261 def __call__(self, repo_name, rev, paths):
263 url_l = [link_to(repo_name, url('files_home',
262 url_l = [link_to(repo_name, url('files_home',
264 repo_name=repo_name,
263 repo_name=repo_name,
265 revision=rev, f_path=''),
264 revision=rev, f_path=''),
266 class_='ypjax-link')]
265 class_='ypjax-link')]
267 paths_l = paths.split('/')
266 paths_l = paths.split('/')
268 for cnt, p in enumerate(paths_l):
267 for cnt, p in enumerate(paths_l):
269 if p != '':
268 if p != '':
270 url_l.append(link_to(p,
269 url_l.append(link_to(p,
271 url('files_home',
270 url('files_home',
272 repo_name=repo_name,
271 repo_name=repo_name,
273 revision=rev,
272 revision=rev,
274 f_path='/'.join(paths_l[:cnt + 1])
273 f_path='/'.join(paths_l[:cnt + 1])
275 ),
274 ),
276 class_='ypjax-link'
275 class_='ypjax-link'
277 )
276 )
278 )
277 )
279
278
280 return literal('/'.join(url_l))
279 return literal('/'.join(url_l))
281
280
282
281
283 files_breadcrumbs = _FilesBreadCrumbs()
282 files_breadcrumbs = _FilesBreadCrumbs()
284
283
285
284
286 class CodeHtmlFormatter(HtmlFormatter):
285 class CodeHtmlFormatter(HtmlFormatter):
287 """
286 """
288 My code Html Formatter for source codes
287 My code Html Formatter for source codes
289 """
288 """
290
289
291 def wrap(self, source, outfile):
290 def wrap(self, source, outfile):
292 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
291 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
293
292
294 def _wrap_code(self, source):
293 def _wrap_code(self, source):
295 for cnt, it in enumerate(source):
294 for cnt, it in enumerate(source):
296 i, t = it
295 i, t = it
297 t = '<span id="L%s">%s</span>' % (cnt + 1, t)
296 t = '<span id="L%s">%s</span>' % (cnt + 1, t)
298 yield i, t
297 yield i, t
299
298
300 def _wrap_tablelinenos(self, inner):
299 def _wrap_tablelinenos(self, inner):
301 inner_lines = []
300 inner_lines = []
302 lncount = 0
301 lncount = 0
303 for t, line in inner:
302 for t, line in inner:
304 if t:
303 if t:
305 lncount += 1
304 lncount += 1
306 inner_lines.append(line)
305 inner_lines.append(line)
307
306
308 fl = self.linenostart
307 fl = self.linenostart
309 mw = len(str(lncount + fl - 1))
308 mw = len(str(lncount + fl - 1))
310 sp = self.linenospecial
309 sp = self.linenospecial
311 st = self.linenostep
310 st = self.linenostep
312 la = self.lineanchors
311 la = self.lineanchors
313 aln = self.anchorlinenos
312 aln = self.anchorlinenos
314 nocls = self.noclasses
313 nocls = self.noclasses
315 if sp:
314 if sp:
316 lines = []
315 lines = []
317
316
318 for i in range(fl, fl + lncount):
317 for i in range(fl, fl + lncount):
319 if i % st == 0:
318 if i % st == 0:
320 if i % sp == 0:
319 if i % sp == 0:
321 if aln:
320 if aln:
322 lines.append('<a href="#%s%d" class="special">%*d</a>' %
321 lines.append('<a href="#%s%d" class="special">%*d</a>' %
323 (la, i, mw, i))
322 (la, i, mw, i))
324 else:
323 else:
325 lines.append('<span class="special">%*d</span>' % (mw, i))
324 lines.append('<span class="special">%*d</span>' % (mw, i))
326 else:
325 else:
327 if aln:
326 if aln:
328 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
327 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
329 else:
328 else:
330 lines.append('%*d' % (mw, i))
329 lines.append('%*d' % (mw, i))
331 else:
330 else:
332 lines.append('')
331 lines.append('')
333 ls = '\n'.join(lines)
332 ls = '\n'.join(lines)
334 else:
333 else:
335 lines = []
334 lines = []
336 for i in range(fl, fl + lncount):
335 for i in range(fl, fl + lncount):
337 if i % st == 0:
336 if i % st == 0:
338 if aln:
337 if aln:
339 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
338 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
340 else:
339 else:
341 lines.append('%*d' % (mw, i))
340 lines.append('%*d' % (mw, i))
342 else:
341 else:
343 lines.append('')
342 lines.append('')
344 ls = '\n'.join(lines)
343 ls = '\n'.join(lines)
345
344
346 # in case you wonder about the seemingly redundant <div> here: since the
345 # in case you wonder about the seemingly redundant <div> here: since the
347 # content in the other cell also is wrapped in a div, some browsers in
346 # content in the other cell also is wrapped in a div, some browsers in
348 # some configurations seem to mess up the formatting...
347 # some configurations seem to mess up the formatting...
349 if nocls:
348 if nocls:
350 yield 0, ('<table class="%stable">' % self.cssclass +
349 yield 0, ('<table class="%stable">' % self.cssclass +
351 '<tr><td><div class="linenodiv">'
350 '<tr><td><div class="linenodiv">'
352 '<pre>' + ls + '</pre></div></td>'
351 '<pre>' + ls + '</pre></div></td>'
353 '<td id="hlcode" class="code">')
352 '<td id="hlcode" class="code">')
354 else:
353 else:
355 yield 0, ('<table class="%stable">' % self.cssclass +
354 yield 0, ('<table class="%stable">' % self.cssclass +
356 '<tr><td class="linenos"><div class="linenodiv">'
355 '<tr><td class="linenos"><div class="linenodiv">'
357 '<pre>' + ls + '</pre></div></td>'
356 '<pre>' + ls + '</pre></div></td>'
358 '<td id="hlcode" class="code">')
357 '<td id="hlcode" class="code">')
359 yield 0, ''.join(inner_lines)
358 yield 0, ''.join(inner_lines)
360 yield 0, '</td></tr></table>'
359 yield 0, '</td></tr></table>'
361
360
362
361
363 _whitespace_re = re.compile(r'(\t)|( )(?=\n|</div>)')
362 _whitespace_re = re.compile(r'(\t)|( )(?=\n|</div>)')
364
363
365
364
366 def _markup_whitespace(m):
365 def _markup_whitespace(m):
367 groups = m.groups()
366 groups = m.groups()
368 if groups[0]:
367 if groups[0]:
369 return '<u>\t</u>'
368 return '<u>\t</u>'
370 if groups[1]:
369 if groups[1]:
371 return ' <i></i>'
370 return ' <i></i>'
372
371
373
372
374 def markup_whitespace(s):
373 def markup_whitespace(s):
375 return _whitespace_re.sub(_markup_whitespace, s)
374 return _whitespace_re.sub(_markup_whitespace, s)
376
375
377
376
378 def pygmentize(filenode, **kwargs):
377 def pygmentize(filenode, **kwargs):
379 """
378 """
380 pygmentize function using pygments
379 pygmentize function using pygments
381
380
382 :param filenode:
381 :param filenode:
383 """
382 """
384 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
383 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
385 return literal(markup_whitespace(
384 return literal(markup_whitespace(
386 code_highlight(safe_str(filenode.content), lexer, CodeHtmlFormatter(**kwargs))))
385 code_highlight(safe_str(filenode.content), lexer, CodeHtmlFormatter(**kwargs))))
387
386
388
387
389 def hsv_to_rgb(h, s, v):
388 def hsv_to_rgb(h, s, v):
390 if s == 0.0:
389 if s == 0.0:
391 return v, v, v
390 return v, v, v
392 i = int(h * 6.0) # XXX assume int() truncates!
391 i = int(h * 6.0) # XXX assume int() truncates!
393 f = (h * 6.0) - i
392 f = (h * 6.0) - i
394 p = v * (1.0 - s)
393 p = v * (1.0 - s)
395 q = v * (1.0 - s * f)
394 q = v * (1.0 - s * f)
396 t = v * (1.0 - s * (1.0 - f))
395 t = v * (1.0 - s * (1.0 - f))
397 i = i % 6
396 i = i % 6
398 if i == 0:
397 if i == 0:
399 return v, t, p
398 return v, t, p
400 if i == 1:
399 if i == 1:
401 return q, v, p
400 return q, v, p
402 if i == 2:
401 if i == 2:
403 return p, v, t
402 return p, v, t
404 if i == 3:
403 if i == 3:
405 return p, q, v
404 return p, q, v
406 if i == 4:
405 if i == 4:
407 return t, p, v
406 return t, p, v
408 if i == 5:
407 if i == 5:
409 return v, p, q
408 return v, p, q
410
409
411
410
412 def gen_color(n=10000):
411 def gen_color(n=10000):
413 """generator for getting n of evenly distributed colors using
412 """generator for getting n of evenly distributed colors using
414 hsv color and golden ratio. It always return same order of colors
413 hsv color and golden ratio. It always return same order of colors
415
414
416 :returns: RGB tuple
415 :returns: RGB tuple
417 """
416 """
418
417
419 golden_ratio = 0.618033988749895
418 golden_ratio = 0.618033988749895
420 h = 0.22717784590367374
419 h = 0.22717784590367374
421
420
422 for _unused in range(n):
421 for _unused in range(n):
423 h += golden_ratio
422 h += golden_ratio
424 h %= 1
423 h %= 1
425 HSV_tuple = [h, 0.95, 0.95]
424 HSV_tuple = [h, 0.95, 0.95]
426 RGB_tuple = hsv_to_rgb(*HSV_tuple)
425 RGB_tuple = hsv_to_rgb(*HSV_tuple)
427 yield [str(int(x * 256)) for x in RGB_tuple]
426 yield [str(int(x * 256)) for x in RGB_tuple]
428
427
429
428
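For illustration, the color generator above is deterministic: every instance starts from the same hue and steps by the golden ratio, so callers get the same well-spread palette each time:

    colors = gen_color()
    first = next(colors)    # a list of three stringified channel values in the 0-255 range
    second = next(colors)   # a visually distinct color; the sequence is identical per instance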
430 def pygmentize_annotation(repo_name, filenode, **kwargs):
429 def pygmentize_annotation(repo_name, filenode, **kwargs):
431 """
430 """
432 pygmentize function for annotation
431 pygmentize function for annotation
433
432
434 :param filenode:
433 :param filenode:
435 """
434 """
436 cgenerator = gen_color()
435 cgenerator = gen_color()
437 color_dict = {}
436 color_dict = {}
438
437
439 def get_color_string(cs):
438 def get_color_string(cs):
440 if cs in color_dict:
439 if cs in color_dict:
441 col = color_dict[cs]
440 col = color_dict[cs]
442 else:
441 else:
443 col = color_dict[cs] = next(cgenerator)
442 col = color_dict[cs] = next(cgenerator)
444 return "color: rgb(%s)! important;" % (', '.join(col))
443 return "color: rgb(%s)! important;" % (', '.join(col))
445
444
446 def url_func(changeset):
445 def url_func(changeset):
447 author = escape(changeset.author)
446 author = escape(changeset.author)
448 date = changeset.date
447 date = changeset.date
449 message = escape(changeset.message)
448 message = escape(changeset.message)
450 tooltip_html = ("<b>Author:</b> %s<br/>"
449 tooltip_html = ("<b>Author:</b> %s<br/>"
451 "<b>Date:</b> %s</b><br/>"
450 "<b>Date:</b> %s</b><br/>"
452 "<b>Message:</b> %s") % (author, date, message)
451 "<b>Message:</b> %s") % (author, date, message)
453
452
454 lnk_format = show_id(changeset)
453 lnk_format = show_id(changeset)
455 uri = link_to(
454 uri = link_to(
456 lnk_format,
455 lnk_format,
457 url('changeset_home', repo_name=repo_name,
456 url('changeset_home', repo_name=repo_name,
458 revision=changeset.raw_id),
457 revision=changeset.raw_id),
459 style=get_color_string(changeset.raw_id),
458 style=get_color_string(changeset.raw_id),
460 **{'data-toggle': 'popover',
459 **{'data-toggle': 'popover',
461 'data-content': tooltip_html}
460 'data-content': tooltip_html}
462 )
461 )
463
462
464 uri += '\n'
463 uri += '\n'
465 return uri
464 return uri
466
465
467 return literal(markup_whitespace(annotate_highlight(filenode, url_func, **kwargs)))
466 return literal(markup_whitespace(annotate_highlight(filenode, url_func, **kwargs)))
468
467
469
468
470 class _Message(object):
469 class _Message(object):
471 """A message returned by ``pop_flash_messages()``.
470 """A message returned by ``pop_flash_messages()``.
472
471
473 Converting the message to a string returns the message text. Instances
472 Converting the message to a string returns the message text. Instances
474 also have the following attributes:
473 also have the following attributes:
475
474
476 * ``category``: the category specified when the message was created.
475 * ``category``: the category specified when the message was created.
477 * ``message``: the html-safe message text.
476 * ``message``: the html-safe message text.
478 """
477 """
479
478
480 def __init__(self, category, message):
479 def __init__(self, category, message):
481 self.category = category
480 self.category = category
482 self.message = message
481 self.message = message
483
482
484
483
485 def _session_flash_messages(append=None, clear=False):
484 def _session_flash_messages(append=None, clear=False):
486 """Manage a message queue in tg.session: return the current message queue
485 """Manage a message queue in tg.session: return the current message queue
487 after appending the given message, and possibly clearing the queue."""
486 after appending the given message, and possibly clearing the queue."""
488 key = 'flash'
487 key = 'flash'
489 from tg import session
488 from tg import session
490 if key in session:
489 if key in session:
491 flash_messages = session[key]
490 flash_messages = session[key]
492 else:
491 else:
493 if append is None: # common fast path - also used for clearing empty queue
492 if append is None: # common fast path - also used for clearing empty queue
494 return [] # don't bother saving
493 return [] # don't bother saving
495 flash_messages = []
494 flash_messages = []
496 session[key] = flash_messages
495 session[key] = flash_messages
497 if append is not None and append not in flash_messages:
496 if append is not None and append not in flash_messages:
498 flash_messages.append(append)
497 flash_messages.append(append)
499 if clear:
498 if clear:
500 session.pop(key, None)
499 session.pop(key, None)
501 session.save()
500 session.save()
502 return flash_messages
501 return flash_messages
503
502
504
503
505 def flash(message, category, logf=None):
504 def flash(message, category, logf=None):
506 """
505 """
507 Show a message to the user _and_ log it through the specified function
506 Show a message to the user _and_ log it through the specified function
508
507
509 category: notice (default), warning, error, success
508 category: notice (default), warning, error, success
510 logf: a custom log function - such as log.debug
509 logf: a custom log function - such as log.debug
511
510
512 logf defaults to log.info, unless category equals 'success', in which
511 logf defaults to log.info, unless category equals 'success', in which
513 case logf defaults to log.debug.
512 case logf defaults to log.debug.
514 """
513 """
515 assert category in ('error', 'success', 'warning'), category
514 assert category in ('error', 'success', 'warning'), category
516 if hasattr(message, '__html__'):
515 if hasattr(message, '__html__'):
517 # render to HTML for storing in cookie
516 # render to HTML for storing in cookie
518 safe_message = str(message)
517 safe_message = str(message)
519 else:
518 else:
520 # Apply str - the message might be an exception with __str__
519 # Apply str - the message might be an exception with __str__
521 # Escape, so we can trust the result without further escaping, without any risk of injection
520 # Escape, so we can trust the result without further escaping, without any risk of injection
522 safe_message = html_escape(str(message))
521 safe_message = html_escape(str(message))
523 if logf is None:
522 if logf is None:
524 logf = log.info
523 logf = log.info
525 if category == 'success':
524 if category == 'success':
526 logf = log.debug
525 logf = log.debug
527
526
528 logf('Flash %s: %s', category, safe_message)
527 logf('Flash %s: %s', category, safe_message)
529
528
530 _session_flash_messages(append=(category, safe_message))
529 _session_flash_messages(append=(category, safe_message))
531
530
532
531
533 def pop_flash_messages():
532 def pop_flash_messages():
534 """Return all accumulated messages and delete them from the session.
533 """Return all accumulated messages and delete them from the session.
535
534
536 The return value is a list of ``Message`` objects.
535 The return value is a list of ``Message`` objects.
537 """
536 """
538 return [_Message(category, message) for category, message in _session_flash_messages(clear=True)]
537 return [_Message(category, message) for category, message in _session_flash_messages(clear=True)]
539
538
540
539
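For illustration, a typical flash cycle using the helpers above (the message text is made up):

    h.flash('Repository created', 'success')    # queued in tg.session, logged via log.debug
    # ... later, when rendering the response:
    for msg in h.pop_flash_messages():          # drains and clears the session queue
        print(msg.category, msg.message)        # 'success', the HTML-safe message text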
541 def age(x, y=False):
540 def age(x, y=False):
542 return _age(x, y)
541 return _age(x, y)
543
542
544 def capitalize(x):
543 def capitalize(x):
545 return x.capitalize()
544 return x.capitalize()
546
545
547 email = author_email
546 email = author_email
548
547
549 def short_id(x):
548 def short_id(x):
550 return x[:12]
549 return x[:12]
551
550
552 def hide_credentials(x):
551 def hide_credentials(x):
553 return ''.join(credentials_filter(x))
552 return ''.join(credentials_filter(x))
554
553
555
554
556 def show_id(cs):
555 def show_id(cs):
557 """
556 """
558 Configurable function that shows the changeset ID;
557 Configurable function that shows the changeset ID;
559 by default it is shown as r123:fffeeefffeee
558 by default it is shown as r123:fffeeefffeee
560
559
561 :param cs: changeset instance
560 :param cs: changeset instance
562 """
561 """
563 from kallithea import CONFIG
562 def_len = safe_int(kallithea.CONFIG.get('show_sha_length', 12))
564 def_len = safe_int(CONFIG.get('show_sha_length', 12))
563 show_rev = asbool(kallithea.CONFIG.get('show_revision_number', False))
565 show_rev = asbool(CONFIG.get('show_revision_number', False))
566
564
567 raw_id = cs.raw_id[:def_len]
565 raw_id = cs.raw_id[:def_len]
568 if show_rev:
566 if show_rev:
569 return 'r%s:%s' % (cs.revision, raw_id)
567 return 'r%s:%s' % (cs.revision, raw_id)
570 else:
568 else:
571 return raw_id
569 return raw_id
572
570
573
571
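For illustration, show_id() above honours two optional settings, read through kallithea.CONFIG; the values here are hypothetical:

    # with show_sha_length = 8 and show_revision_number = true in the configuration:
    h.show_id(cs)    # -> 'r42:deadbeef' (revision number plus 8-char hash)
    # with the defaults (12 chars, no revision number):
    h.show_id(cs)    # -> 'deadbeefcafe'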
574 def fmt_date(date):
572 def fmt_date(date):
575 if date:
573 if date:
576 return date.strftime("%Y-%m-%d %H:%M:%S")
574 return date.strftime("%Y-%m-%d %H:%M:%S")
577 return ""
575 return ""
578
576
579
577
580 def is_git(repository):
578 def is_git(repository):
581 if hasattr(repository, 'alias'):
579 if hasattr(repository, 'alias'):
582 _type = repository.alias
580 _type = repository.alias
583 elif hasattr(repository, 'repo_type'):
581 elif hasattr(repository, 'repo_type'):
584 _type = repository.repo_type
582 _type = repository.repo_type
585 else:
583 else:
586 _type = repository
584 _type = repository
587 return _type == 'git'
585 return _type == 'git'
588
586
589
587
590 def is_hg(repository):
588 def is_hg(repository):
591 if hasattr(repository, 'alias'):
589 if hasattr(repository, 'alias'):
592 _type = repository.alias
590 _type = repository.alias
593 elif hasattr(repository, 'repo_type'):
591 elif hasattr(repository, 'repo_type'):
594 _type = repository.repo_type
592 _type = repository.repo_type
595 else:
593 else:
596 _type = repository
594 _type = repository
597 return _type == 'hg'
595 return _type == 'hg'
598
596
599
597
600 @cache_region('long_term', 'user_attr_or_none')
598 @cache_region('long_term', 'user_attr_or_none')
601 def user_attr_or_none(author, show_attr):
599 def user_attr_or_none(author, show_attr):
602 """Try to match email part of VCS committer string with a local user and return show_attr
600 """Try to match email part of VCS committer string with a local user and return show_attr
603 - or return None if user not found"""
601 - or return None if user not found"""
604 email = author_email(author)
602 email = author_email(author)
605 if email:
603 if email:
606 from kallithea.model.db import User
604 from kallithea.model.db import User
607 user = User.get_by_email(email)
605 user = User.get_by_email(email)
608 if user is not None:
606 if user is not None:
609 return getattr(user, show_attr)
607 return getattr(user, show_attr)
610 return None
608 return None
611
609
612
610
613 def email_or_none(author):
611 def email_or_none(author):
614 """Try to match email part of VCS committer string with a local user.
612 """Try to match email part of VCS committer string with a local user.
615 Return primary email of user, email part of the specified author name, or None."""
613 Return primary email of user, email part of the specified author name, or None."""
616 if not author:
614 if not author:
617 return None
615 return None
618 email = user_attr_or_none(author, 'email')
616 email = user_attr_or_none(author, 'email')
619 if email is not None:
617 if email is not None:
620 return email # always use user's main email address - not necessarily the one used to find user
618 return email # always use user's main email address - not necessarily the one used to find user
621
619
622 # extract email from the commit string
620 # extract email from the commit string
623 email = author_email(author)
621 email = author_email(author)
624 if email:
622 if email:
625 return email
623 return email
626
624
627 # No valid email, not a valid user in the system, none!
625 # No valid email, not a valid user in the system, none!
628 return None
626 return None
629
627
630
628
631 def person(author, show_attr="username"):
629 def person(author, show_attr="username"):
632 """Find the user identified by 'author', return one of the users attributes,
630 """Find the user identified by 'author', return one of the users attributes,
633 default to the username attribute, None if there is no user"""
631 default to the username attribute, None if there is no user"""
634 from kallithea.model.db import User
632 from kallithea.model.db import User
635
633
636 # if author is already an instance use it for extraction
634 # if author is already an instance use it for extraction
637 if isinstance(author, User):
635 if isinstance(author, User):
638 return getattr(author, show_attr)
636 return getattr(author, show_attr)
639
637
640 value = user_attr_or_none(author, show_attr)
638 value = user_attr_or_none(author, show_attr)
641 if value is not None:
639 if value is not None:
642 return value
640 return value
643
641
644 # Still nothing? Just pass back the author name if any, else the email
642 # Still nothing? Just pass back the author name if any, else the email
645 return author_name(author) or email(author)
643 return author_name(author) or email(author)
646
644
647
645
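For illustration, how the author-resolution helpers above fall back, using a made-up author string:

    h.email_or_none('Jane Doe <jane@example.com>')
    # -> the matched user's primary email if that address belongs to a Kallithea user,
    #    otherwise 'jane@example.com', otherwise None
    h.person('Jane Doe <jane@example.com>')
    # -> the matched user's username, otherwise the author name 'Jane Doe',
    #    otherwise the email part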
648 def person_by_id(id_, show_attr="username"):
646 def person_by_id(id_, show_attr="username"):
649 from kallithea.model.db import User
647 from kallithea.model.db import User
650
648
651 # maybe it's an ID ?
649 # maybe it's an ID ?
652 if str(id_).isdigit() or isinstance(id_, int):
650 if str(id_).isdigit() or isinstance(id_, int):
653 id_ = int(id_)
651 id_ = int(id_)
654 user = User.get(id_)
652 user = User.get(id_)
655 if user is not None:
653 if user is not None:
656 return getattr(user, show_attr)
654 return getattr(user, show_attr)
657 return id_
655 return id_
658
656
659
657
660 def boolicon(value):
658 def boolicon(value):
661 """Returns boolean value of a value, represented as small html image of true/false
659 """Returns boolean value of a value, represented as small html image of true/false
662 icons
660 icons
663
661
664 :param value: value
662 :param value: value
665 """
663 """
666
664
667 if value:
665 if value:
668 return HTML.tag('i', class_="icon-ok")
666 return HTML.tag('i', class_="icon-ok")
669 else:
667 else:
670 return HTML.tag('i', class_="icon-minus-circled")
668 return HTML.tag('i', class_="icon-minus-circled")
671
669
672
670
673 def action_parser(user_log, feed=False, parse_cs=False):
671 def action_parser(user_log, feed=False, parse_cs=False):
674 """
672 """
675 This helper will action_map the specified string action into translated
673 This helper will action_map the specified string action into translated
676 fancy names with icons and links
674 fancy names with icons and links
677
675
678 :param user_log: user log instance
676 :param user_log: user log instance
679 :param feed: use output for feeds (no html and fancy icons)
677 :param feed: use output for feeds (no html and fancy icons)
680 :param parse_cs: parse Changesets into VCS instances
678 :param parse_cs: parse Changesets into VCS instances
681 """
679 """
682
680
683 action = user_log.action
681 action = user_log.action
684 action_params = ' '
682 action_params = ' '
685
683
686 x = action.split(':')
684 x = action.split(':')
687
685
688 if len(x) > 1:
686 if len(x) > 1:
689 action, action_params = x
687 action, action_params = x
690
688
691 def get_cs_links():
689 def get_cs_links():
692 revs_limit = 3 # display this amount always
690 revs_limit = 3 # display this amount always
693 revs_top_limit = 50 # show up to this amount of hidden changesets
691 revs_top_limit = 50 # show up to this amount of hidden changesets
694 revs_ids = action_params.split(',')
692 revs_ids = action_params.split(',')
695 deleted = user_log.repository is None
693 deleted = user_log.repository is None
696 if deleted:
694 if deleted:
697 return ','.join(revs_ids)
695 return ','.join(revs_ids)
698
696
699 repo_name = user_log.repository.repo_name
697 repo_name = user_log.repository.repo_name
700
698
701 def lnk(rev, repo_name):
699 def lnk(rev, repo_name):
702 lazy_cs = False
700 lazy_cs = False
703 title_ = None
701 title_ = None
704 url_ = '#'
702 url_ = '#'
705 if isinstance(rev, BaseChangeset) or isinstance(rev, AttributeDict):
703 if isinstance(rev, BaseChangeset) or isinstance(rev, AttributeDict):
706 if rev.op and rev.ref_name:
704 if rev.op and rev.ref_name:
707 if rev.op == 'delete_branch':
705 if rev.op == 'delete_branch':
708 lbl = _('Deleted branch: %s') % rev.ref_name
706 lbl = _('Deleted branch: %s') % rev.ref_name
709 elif rev.op == 'tag':
707 elif rev.op == 'tag':
710 lbl = _('Created tag: %s') % rev.ref_name
708 lbl = _('Created tag: %s') % rev.ref_name
711 else:
709 else:
712 lbl = 'Unknown operation %s' % rev.op
710 lbl = 'Unknown operation %s' % rev.op
713 else:
711 else:
714 lazy_cs = True
712 lazy_cs = True
715 lbl = rev.short_id[:8]
713 lbl = rev.short_id[:8]
716 url_ = url('changeset_home', repo_name=repo_name,
714 url_ = url('changeset_home', repo_name=repo_name,
717 revision=rev.raw_id)
715 revision=rev.raw_id)
718 else:
716 else:
719 # changeset cannot be found - it might have been stripped or removed
717 # changeset cannot be found - it might have been stripped or removed
720 lbl = rev[:12]
718 lbl = rev[:12]
721 title_ = _('Changeset %s not found') % lbl
719 title_ = _('Changeset %s not found') % lbl
722 if parse_cs:
720 if parse_cs:
723 return link_to(lbl, url_, title=title_, **{'data-toggle': 'tooltip'})
721 return link_to(lbl, url_, title=title_, **{'data-toggle': 'tooltip'})
724 return link_to(lbl, url_, class_='lazy-cs' if lazy_cs else '',
722 return link_to(lbl, url_, class_='lazy-cs' if lazy_cs else '',
725 **{'data-raw_id': rev.raw_id, 'data-repo_name': repo_name})
723 **{'data-raw_id': rev.raw_id, 'data-repo_name': repo_name})
726
724
727 def _get_op(rev_txt):
725 def _get_op(rev_txt):
728 _op = None
726 _op = None
729 _name = rev_txt
727 _name = rev_txt
730 if len(rev_txt.split('=>')) == 2:
728 if len(rev_txt.split('=>')) == 2:
731 _op, _name = rev_txt.split('=>')
729 _op, _name = rev_txt.split('=>')
732 return _op, _name
730 return _op, _name
733
731
734 revs = []
732 revs = []
735 if len([v for v in revs_ids if v != '']) > 0:
733 if len([v for v in revs_ids if v != '']) > 0:
736 repo = None
734 repo = None
737 for rev in revs_ids[:revs_top_limit]:
735 for rev in revs_ids[:revs_top_limit]:
738 _op, _name = _get_op(rev)
736 _op, _name = _get_op(rev)
739
737
740 # we want parsed changesets, or new log store format is bad
738 # we want parsed changesets, or new log store format is bad
741 if parse_cs:
739 if parse_cs:
742 try:
740 try:
743 if repo is None:
741 if repo is None:
744 repo = user_log.repository.scm_instance
742 repo = user_log.repository.scm_instance
745 _rev = repo.get_changeset(rev)
743 _rev = repo.get_changeset(rev)
746 revs.append(_rev)
744 revs.append(_rev)
747 except ChangesetDoesNotExistError:
745 except ChangesetDoesNotExistError:
748 log.error('cannot find revision %s in this repo', rev)
746 log.error('cannot find revision %s in this repo', rev)
749 revs.append(rev)
747 revs.append(rev)
750 else:
748 else:
751 _rev = AttributeDict({
749 _rev = AttributeDict({
752 'short_id': rev[:12],
750 'short_id': rev[:12],
753 'raw_id': rev,
751 'raw_id': rev,
754 'message': '',
752 'message': '',
755 'op': _op,
753 'op': _op,
756 'ref_name': _name
754 'ref_name': _name
757 })
755 })
758 revs.append(_rev)
756 revs.append(_rev)
759 cs_links = [" " + ', '.join(
757 cs_links = [" " + ', '.join(
760 [lnk(rev, repo_name) for rev in revs[:revs_limit]]
758 [lnk(rev, repo_name) for rev in revs[:revs_limit]]
761 )]
759 )]
762 _op1, _name1 = _get_op(revs_ids[0])
760 _op1, _name1 = _get_op(revs_ids[0])
763 _op2, _name2 = _get_op(revs_ids[-1])
761 _op2, _name2 = _get_op(revs_ids[-1])
764
762
765 _rev = '%s...%s' % (_name1, _name2)
763 _rev = '%s...%s' % (_name1, _name2)
766
764
767 compare_view = (
765 compare_view = (
768 ' <div class="compare_view" data-toggle="tooltip" title="%s">'
766 ' <div class="compare_view" data-toggle="tooltip" title="%s">'
769 '<a href="%s">%s</a> </div>' % (
767 '<a href="%s">%s</a> </div>' % (
770 _('Show all combined changesets %s->%s') % (
768 _('Show all combined changesets %s->%s') % (
771 revs_ids[0][:12], revs_ids[-1][:12]
769 revs_ids[0][:12], revs_ids[-1][:12]
772 ),
770 ),
773 url('changeset_home', repo_name=repo_name,
771 url('changeset_home', repo_name=repo_name,
774 revision=_rev
772 revision=_rev
775 ),
773 ),
776 _('Compare view')
774 _('Compare view')
777 )
775 )
778 )
776 )
779
777
780 # if we have exactly one more than normally displayed
778 # if we have exactly one more than normally displayed
781 # just display it, takes less space than displaying
779 # just display it, takes less space than displaying
782 # "and 1 more revisions"
780 # "and 1 more revisions"
783 if len(revs_ids) == revs_limit + 1:
781 if len(revs_ids) == revs_limit + 1:
784 cs_links.append(", " + lnk(revs[revs_limit], repo_name))
782 cs_links.append(", " + lnk(revs[revs_limit], repo_name))
785
783
786 # hidden-by-default ones
784 # hidden-by-default ones
787 if len(revs_ids) > revs_limit + 1:
785 if len(revs_ids) > revs_limit + 1:
788 uniq_id = revs_ids[0]
786 uniq_id = revs_ids[0]
789 html_tmpl = (
787 html_tmpl = (
790 '<span> %s <a class="show_more" id="_%s" '
788 '<span> %s <a class="show_more" id="_%s" '
791 'href="#more">%s</a> %s</span>'
789 'href="#more">%s</a> %s</span>'
792 )
790 )
793 if not feed:
791 if not feed:
794 cs_links.append(html_tmpl % (
792 cs_links.append(html_tmpl % (
795 _('and'),
793 _('and'),
796 uniq_id, _('%s more') % (len(revs_ids) - revs_limit),
794 uniq_id, _('%s more') % (len(revs_ids) - revs_limit),
797 _('revisions')
795 _('revisions')
798 )
796 )
799 )
797 )
800
798
801 if not feed:
799 if not feed:
802 html_tmpl = '<span id="%s" style="display:none">, %s </span>'
800 html_tmpl = '<span id="%s" style="display:none">, %s </span>'
803 else:
801 else:
804 html_tmpl = '<span id="%s"> %s </span>'
802 html_tmpl = '<span id="%s"> %s </span>'
805
803
806 morelinks = ', '.join(
804 morelinks = ', '.join(
807 [lnk(rev, repo_name) for rev in revs[revs_limit:]]
805 [lnk(rev, repo_name) for rev in revs[revs_limit:]]
808 )
806 )
809
807
810 if len(revs_ids) > revs_top_limit:
808 if len(revs_ids) > revs_top_limit:
811 morelinks += ', ...'
809 morelinks += ', ...'
812
810
813 cs_links.append(html_tmpl % (uniq_id, morelinks))
811 cs_links.append(html_tmpl % (uniq_id, morelinks))
814 if len(revs) > 1:
812 if len(revs) > 1:
815 cs_links.append(compare_view)
813 cs_links.append(compare_view)
816 return ''.join(cs_links)
814 return ''.join(cs_links)
817
815
818 def get_fork_name():
816 def get_fork_name():
819 repo_name = action_params
817 repo_name = action_params
820 url_ = url('summary_home', repo_name=repo_name)
818 url_ = url('summary_home', repo_name=repo_name)
821 return _('Fork name %s') % link_to(action_params, url_)
819 return _('Fork name %s') % link_to(action_params, url_)
822
820
823 def get_user_name():
821 def get_user_name():
824 user_name = action_params
822 user_name = action_params
825 return user_name
823 return user_name
826
824
827 def get_users_group():
825 def get_users_group():
828 group_name = action_params
826 group_name = action_params
829 return group_name
827 return group_name
830
828
831 def get_pull_request():
829 def get_pull_request():
832 from kallithea.model.db import PullRequest
830 from kallithea.model.db import PullRequest
833 pull_request_id = action_params
831 pull_request_id = action_params
834 nice_id = PullRequest.make_nice_id(pull_request_id)
832 nice_id = PullRequest.make_nice_id(pull_request_id)
835
833
836 deleted = user_log.repository is None
834 deleted = user_log.repository is None
837 if deleted:
835 if deleted:
838 repo_name = user_log.repository_name
836 repo_name = user_log.repository_name
839 else:
837 else:
840 repo_name = user_log.repository.repo_name
838 repo_name = user_log.repository.repo_name
841
839
842 return link_to(_('Pull request %s') % nice_id,
840 return link_to(_('Pull request %s') % nice_id,
843 url('pullrequest_show', repo_name=repo_name,
841 url('pullrequest_show', repo_name=repo_name,
844 pull_request_id=pull_request_id))
842 pull_request_id=pull_request_id))
845
843
846 def get_archive_name():
844 def get_archive_name():
847 archive_name = action_params
845 archive_name = action_params
848 return archive_name
846 return archive_name
849
847
850 # action : translated str, callback(extractor), icon
848 # action : translated str, callback(extractor), icon
851 action_map = {
849 action_map = {
852 'user_deleted_repo': (_('[deleted] repository'),
850 'user_deleted_repo': (_('[deleted] repository'),
853 None, 'icon-trashcan'),
851 None, 'icon-trashcan'),
854 'user_created_repo': (_('[created] repository'),
852 'user_created_repo': (_('[created] repository'),
855 None, 'icon-plus'),
853 None, 'icon-plus'),
856 'user_created_fork': (_('[created] repository as fork'),
854 'user_created_fork': (_('[created] repository as fork'),
857 None, 'icon-fork'),
855 None, 'icon-fork'),
858 'user_forked_repo': (_('[forked] repository'),
856 'user_forked_repo': (_('[forked] repository'),
859 get_fork_name, 'icon-fork'),
857 get_fork_name, 'icon-fork'),
860 'user_updated_repo': (_('[updated] repository'),
858 'user_updated_repo': (_('[updated] repository'),
861 None, 'icon-pencil'),
859 None, 'icon-pencil'),
862 'user_downloaded_archive': (_('[downloaded] archive from repository'),
860 'user_downloaded_archive': (_('[downloaded] archive from repository'),
863 get_archive_name, 'icon-download-cloud'),
861 get_archive_name, 'icon-download-cloud'),
864 'admin_deleted_repo': (_('[deleted] repository'),
862 'admin_deleted_repo': (_('[deleted] repository'),
865 None, 'icon-trashcan'),
863 None, 'icon-trashcan'),
866 'admin_created_repo': (_('[created] repository'),
864 'admin_created_repo': (_('[created] repository'),
867 None, 'icon-plus'),
865 None, 'icon-plus'),
868 'admin_forked_repo': (_('[forked] repository'),
866 'admin_forked_repo': (_('[forked] repository'),
869 None, 'icon-fork'),
867 None, 'icon-fork'),
870 'admin_updated_repo': (_('[updated] repository'),
868 'admin_updated_repo': (_('[updated] repository'),
871 None, 'icon-pencil'),
869 None, 'icon-pencil'),
872 'admin_created_user': (_('[created] user'),
870 'admin_created_user': (_('[created] user'),
873 get_user_name, 'icon-user'),
871 get_user_name, 'icon-user'),
874 'admin_updated_user': (_('[updated] user'),
872 'admin_updated_user': (_('[updated] user'),
875 get_user_name, 'icon-user'),
873 get_user_name, 'icon-user'),
876 'admin_created_users_group': (_('[created] user group'),
874 'admin_created_users_group': (_('[created] user group'),
877 get_users_group, 'icon-pencil'),
875 get_users_group, 'icon-pencil'),
878 'admin_updated_users_group': (_('[updated] user group'),
876 'admin_updated_users_group': (_('[updated] user group'),
879 get_users_group, 'icon-pencil'),
877 get_users_group, 'icon-pencil'),
880 'user_commented_revision': (_('[commented] on revision in repository'),
878 'user_commented_revision': (_('[commented] on revision in repository'),
881 get_cs_links, 'icon-comment'),
879 get_cs_links, 'icon-comment'),
882 'user_commented_pull_request': (_('[commented] on pull request for'),
880 'user_commented_pull_request': (_('[commented] on pull request for'),
883 get_pull_request, 'icon-comment'),
881 get_pull_request, 'icon-comment'),
884 'user_closed_pull_request': (_('[closed] pull request for'),
882 'user_closed_pull_request': (_('[closed] pull request for'),
885 get_pull_request, 'icon-ok'),
883 get_pull_request, 'icon-ok'),
886 'push': (_('[pushed] into'),
884 'push': (_('[pushed] into'),
887 get_cs_links, 'icon-move-up'),
885 get_cs_links, 'icon-move-up'),
888 'push_local': (_('[committed via Kallithea] into repository'),
886 'push_local': (_('[committed via Kallithea] into repository'),
889 get_cs_links, 'icon-pencil'),
887 get_cs_links, 'icon-pencil'),
890 'push_remote': (_('[pulled from remote] into repository'),
888 'push_remote': (_('[pulled from remote] into repository'),
891 get_cs_links, 'icon-move-up'),
889 get_cs_links, 'icon-move-up'),
892 'pull': (_('[pulled] from'),
890 'pull': (_('[pulled] from'),
893 None, 'icon-move-down'),
891 None, 'icon-move-down'),
894 'started_following_repo': (_('[started following] repository'),
892 'started_following_repo': (_('[started following] repository'),
895 None, 'icon-heart'),
893 None, 'icon-heart'),
896 'stopped_following_repo': (_('[stopped following] repository'),
894 'stopped_following_repo': (_('[stopped following] repository'),
897 None, 'icon-heart-empty'),
895 None, 'icon-heart-empty'),
898 }
896 }
899
897
900 action_str = action_map.get(action, action)
898 action_str = action_map.get(action, action)
901 if feed:
899 if feed:
902 action = action_str[0].replace('[', '').replace(']', '')
900 action = action_str[0].replace('[', '').replace(']', '')
903 else:
901 else:
904 action = action_str[0] \
902 action = action_str[0] \
905 .replace('[', '<b>') \
903 .replace('[', '<b>') \
906 .replace(']', '</b>')
904 .replace(']', '</b>')
907
905
908 action_params_func = action_str[1] if callable(action_str[1]) else (lambda: "")
906 action_params_func = action_str[1] if callable(action_str[1]) else (lambda: "")
909
907
910 def action_parser_icon():
908 def action_parser_icon():
911 action = user_log.action
909 action = user_log.action
912 action_params = None
910 action_params = None
913 x = action.split(':')
911 x = action.split(':')
914
912
915 if len(x) > 1:
913 if len(x) > 1:
916 action, action_params = x
914 action, action_params = x
917
915
918 ico = action_map.get(action, ['', '', ''])[2]
916 ico = action_map.get(action, ['', '', ''])[2]
919 html = """<i class="%s"></i>""" % ico
917 html = """<i class="%s"></i>""" % ico
920 return literal(html)
918 return literal(html)
921
919
922 # return callbacks; the caller invokes them to get the action text, parameters, and icon
920 # return callbacks; the caller invokes them to get the action text, parameters, and icon
923 return [lambda: literal(action), action_params_func, action_parser_icon]
921 return [lambda: literal(action), action_params_func, action_parser_icon]
924
922
925
923
926 #==============================================================================
924 #==============================================================================
927 # GRAVATAR URL
925 # GRAVATAR URL
928 #==============================================================================
926 #==============================================================================
929 def gravatar_div(email_address, cls='', size=30, **div_attributes):
927 def gravatar_div(email_address, cls='', size=30, **div_attributes):
930 """Return an html literal with a span around a gravatar if they are enabled.
928 """Return an html literal with a span around a gravatar if they are enabled.
931 Extra keyword parameters starting with 'div_' will get the prefix removed
929 Extra keyword parameters starting with 'div_' will get the prefix removed
932 and '_' changed to '-' and be used as attributes on the div. The default
930 and '_' changed to '-' and be used as attributes on the div. The default
933 class is 'gravatar'.
931 class is 'gravatar'.
934 """
932 """
935 from tg import tmpl_context as c
933 from tg import tmpl_context as c
936 if not c.visual.use_gravatar:
934 if not c.visual.use_gravatar:
937 return ''
935 return ''
938 if 'div_class' not in div_attributes:
936 if 'div_class' not in div_attributes:
939 div_attributes['div_class'] = "gravatar"
937 div_attributes['div_class'] = "gravatar"
940 attributes = []
938 attributes = []
941 for k, v in sorted(div_attributes.items()):
939 for k, v in sorted(div_attributes.items()):
942 assert k.startswith('div_'), k
940 assert k.startswith('div_'), k
943 attributes.append(' %s="%s"' % (k[4:].replace('_', '-'), escape(v)))
941 attributes.append(' %s="%s"' % (k[4:].replace('_', '-'), escape(v)))
944 return literal("""<span%s>%s</span>""" %
942 return literal("""<span%s>%s</span>""" %
945 (''.join(attributes),
943 (''.join(attributes),
946 gravatar(email_address, cls=cls, size=size)))
944 gravatar(email_address, cls=cls, size=size)))
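To make the div_ keyword convention described in the docstring concrete, here is a small usage sketch (the email, title and data attribute are invented example values, not taken from this changeset):

    # with gravatars enabled, a call such as
    #   gravatar_div('dev@example.com', size=24, div_title='Author', div_data_user='dev')
    # picks up the default div_class='gravatar' and renders roughly
    #   <span class="gravatar" data-user="dev" title="Author"><i class="icon-gravatar" ...></i></span>
    # (attributes are emitted in sorted keyword order: class, data-user, title)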
947
945
948
946
949 def gravatar(email_address, cls='', size=30):
947 def gravatar(email_address, cls='', size=30):
950 """return html element of the gravatar
948 """return html element of the gravatar
951
949
952 This method will return an <i> element with a gravatar background image
950 This method will return an <i> element with a gravatar background image
953 requested at double the display size (for retina screens). If the url
951 requested at double the display size (for retina screens). If the url
954 returned from gravatar_url is empty then we fall back to using a font icon.
952 returned from gravatar_url is empty then we fall back to using a font icon.
955
953
956 """
954 """
957 from tg import tmpl_context as c
955 from tg import tmpl_context as c
958 if not c.visual.use_gravatar:
956 if not c.visual.use_gravatar:
959 return ''
957 return ''
960
958
961 src = gravatar_url(email_address, size * 2)
959 src = gravatar_url(email_address, size * 2)
962
960
963 if src:
961 if src:
964 # here it makes sense to use style="width: ..." (instead of, say, a
962 # here it makes sense to use style="width: ..." (instead of, say, a
965 # stylesheet) because we are using this to generate a high-res (retina) size
963 # stylesheet) because we are using this to generate a high-res (retina) size
966 html = ('<i class="icon-gravatar {cls}"'
964 html = ('<i class="icon-gravatar {cls}"'
967 ' style="font-size: {size}px;background-size: {size}px;background-image: url(\'{src}\')"'
965 ' style="font-size: {size}px;background-size: {size}px;background-image: url(\'{src}\')"'
968 '></i>').format(cls=cls, size=size, src=src)
966 '></i>').format(cls=cls, size=size, src=src)
969
967
970 else:
968 else:
971 # if src is empty then there was no gravatar, so we use a font icon
969 # if src is empty then there was no gravatar, so we use a font icon
972 html = ("""<i class="icon-user {cls}" style="font-size: {size}px;"></i>"""
970 html = ("""<i class="icon-user {cls}" style="font-size: {size}px;"></i>"""
973 .format(cls=cls, size=size))
971 .format(cls=cls, size=size))
974
972
975 return literal(html)
973 return literal(html)
976
974
977
975
978 def gravatar_url(email_address, size=30, default=''):
976 def gravatar_url(email_address, size=30, default=''):
979 from tg import tmpl_context as c
977 from tg import tmpl_context as c
980
978
981 if not c.visual.use_gravatar:
979 if not c.visual.use_gravatar:
982 return ""
980 return ""
983
981
984 _def = 'anonymous@kallithea-scm.org' # default gravatar
982 _def = 'anonymous@kallithea-scm.org' # default gravatar
985 email_address = email_address or _def
983 email_address = email_address or _def
986
984
987 if email_address == _def:
985 if email_address == _def:
988 return default
986 return default
989
987
990 # re-import url so tests can mock it
988 # re-import url so tests can mock it
991 from kallithea.config.routing import url
989 from kallithea.config.routing import url
992 from kallithea.model.db import User
990 from kallithea.model.db import User
993
991
994 parsed_url = urllib.parse.urlparse(url.current(qualified=True))
992 parsed_url = urllib.parse.urlparse(url.current(qualified=True))
995 url = (c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL) \
993 url = (c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL) \
996 .replace('{email}', email_address) \
994 .replace('{email}', email_address) \
997 .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \
995 .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \
998 .replace('{netloc}', parsed_url.netloc) \
996 .replace('{netloc}', parsed_url.netloc) \
999 .replace('{scheme}', parsed_url.scheme) \
997 .replace('{scheme}', parsed_url.scheme) \
1000 .replace('{size}', str(size))
998 .replace('{size}', str(size))
1001 return url
999 return url
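To illustrate the placeholder substitution above, a configured gravatar_url template might look like the following (the host and query parameters are example values, not defaults asserted by this changeset):

    # c.visual.gravatar_url = 'https://avatars.example.com/{md5email}?s={size}&d=identicon'
    # gravatar() requests double the display size, so for 'dev@example.com' shown at 30px
    # the call gravatar_url('dev@example.com', 60) expands the template to roughly
    #   'https://avatars.example.com/<md5 of the lowercased email>?s=60&d=identicon'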
1002
1000
1003
1001
1004 def changed_tooltip(nodes):
1002 def changed_tooltip(nodes):
1005 """
1003 """
1006 Generates an html string for the changed nodes on a changeset page.
1004 Generates an html string for the changed nodes on a changeset page.
1007 It limits the output to 30 entries.
1005 It limits the output to 30 entries.
1008
1006
1009 :param nodes: LazyNodesGenerator
1007 :param nodes: LazyNodesGenerator
1010 """
1008 """
1011 if nodes:
1009 if nodes:
1012 pref = ': <br/> '
1010 pref = ': <br/> '
1013 suf = ''
1011 suf = ''
1014 if len(nodes) > 30:
1012 if len(nodes) > 30:
1015 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
1013 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
1016 return literal(pref + '<br/> '.join([x.path
1014 return literal(pref + '<br/> '.join([x.path
1017 for x in nodes[:30]]) + suf)
1015 for x in nodes[:30]]) + suf)
1018 else:
1016 else:
1019 return ': ' + _('No files')
1017 return ': ' + _('No files')
1020
1018
1021
1019
1022 def fancy_file_stats(stats):
1020 def fancy_file_stats(stats):
1023 """
1021 """
1024 Displays a fancy two-colored bar for the number of added/deleted
1022 Displays a fancy two-colored bar for the number of added/deleted
1025 lines of code in a file
1023 lines of code in a file
1026
1024
1027 :param stats: two element list of added/deleted lines of code
1025 :param stats: two element list of added/deleted lines of code
1028 """
1026 """
1029 from kallithea.lib.diffs import BIN_FILENODE, CHMOD_FILENODE, DEL_FILENODE, MOD_FILENODE, NEW_FILENODE, RENAMED_FILENODE
1027 from kallithea.lib.diffs import BIN_FILENODE, CHMOD_FILENODE, DEL_FILENODE, MOD_FILENODE, NEW_FILENODE, RENAMED_FILENODE
1030
1028
1031 a, d = stats['added'], stats['deleted']
1029 a, d = stats['added'], stats['deleted']
1032 width = 100
1030 width = 100
1033
1031
1034 if stats['binary']:
1032 if stats['binary']:
1035 # binary mode
1033 # binary mode
1036 lbl = ''
1034 lbl = ''
1037 bin_op = 1
1035 bin_op = 1
1038
1036
1039 if BIN_FILENODE in stats['ops']:
1037 if BIN_FILENODE in stats['ops']:
1040 lbl = 'bin+'
1038 lbl = 'bin+'
1041
1039
1042 if NEW_FILENODE in stats['ops']:
1040 if NEW_FILENODE in stats['ops']:
1043 lbl += _('new file')
1041 lbl += _('new file')
1044 bin_op = NEW_FILENODE
1042 bin_op = NEW_FILENODE
1045 elif MOD_FILENODE in stats['ops']:
1043 elif MOD_FILENODE in stats['ops']:
1046 lbl += _('mod')
1044 lbl += _('mod')
1047 bin_op = MOD_FILENODE
1045 bin_op = MOD_FILENODE
1048 elif DEL_FILENODE in stats['ops']:
1046 elif DEL_FILENODE in stats['ops']:
1049 lbl += _('del')
1047 lbl += _('del')
1050 bin_op = DEL_FILENODE
1048 bin_op = DEL_FILENODE
1051 elif RENAMED_FILENODE in stats['ops']:
1049 elif RENAMED_FILENODE in stats['ops']:
1052 lbl += _('rename')
1050 lbl += _('rename')
1053 bin_op = RENAMED_FILENODE
1051 bin_op = RENAMED_FILENODE
1054
1052
1055 # chmod can go with other operations
1053 # chmod can go with other operations
1056 if CHMOD_FILENODE in stats['ops']:
1054 if CHMOD_FILENODE in stats['ops']:
1057 _org_lbl = _('chmod')
1055 _org_lbl = _('chmod')
1058 lbl += _org_lbl if lbl.endswith('+') else '+%s' % _org_lbl
1056 lbl += _org_lbl if lbl.endswith('+') else '+%s' % _org_lbl
1059
1057
1060 #import ipdb;ipdb.set_trace()
1058 #import ipdb;ipdb.set_trace()
1061 b_d = '<div class="bin bin%s progress-bar" style="width:100%%">%s</div>' % (bin_op, lbl)
1059 b_d = '<div class="bin bin%s progress-bar" style="width:100%%">%s</div>' % (bin_op, lbl)
1062 b_a = '<div class="bin bin1" style="width:0%"></div>'
1060 b_a = '<div class="bin bin1" style="width:0%"></div>'
1063 return literal('<div style="width:%spx" class="progress">%s%s</div>' % (width, b_a, b_d))
1061 return literal('<div style="width:%spx" class="progress">%s%s</div>' % (width, b_a, b_d))
1064
1062
1065 t = stats['added'] + stats['deleted']
1063 t = stats['added'] + stats['deleted']
1066 unit = float(width) / (t or 1)
1064 unit = float(width) / (t or 1)
1067
1065
1068 # needs > 9% of width to be visible or 0 to be hidden
1066 # needs > 9% of width to be visible or 0 to be hidden
1069 a_p = max(9, unit * a) if a > 0 else 0
1067 a_p = max(9, unit * a) if a > 0 else 0
1070 d_p = max(9, unit * d) if d > 0 else 0
1068 d_p = max(9, unit * d) if d > 0 else 0
1071 p_sum = a_p + d_p
1069 p_sum = a_p + d_p
1072
1070
1073 if p_sum > width:
1071 if p_sum > width:
1074 # scale back so the total equals 100%, since clamping to the 9% minimum overshot it
1072 # scale back so the total equals 100%, since clamping to the 9% minimum overshot it
1075 if a_p > d_p:
1073 if a_p > d_p:
1076 a_p = a_p - (p_sum - width)
1074 a_p = a_p - (p_sum - width)
1077 else:
1075 else:
1078 d_p = d_p - (p_sum - width)
1076 d_p = d_p - (p_sum - width)
1079
1077
1080 a_v = a if a > 0 else ''
1078 a_v = a if a > 0 else ''
1081 d_v = d if d > 0 else ''
1079 d_v = d if d > 0 else ''
1082
1080
1083 d_a = '<div class="added progress-bar" style="width:%s%%">%s</div>' % (
1081 d_a = '<div class="added progress-bar" style="width:%s%%">%s</div>' % (
1084 a_p, a_v
1082 a_p, a_v
1085 )
1083 )
1086 d_d = '<div class="deleted progress-bar" style="width:%s%%">%s</div>' % (
1084 d_d = '<div class="deleted progress-bar" style="width:%s%%">%s</div>' % (
1087 d_p, d_v
1085 d_p, d_v
1088 )
1086 )
1089 return literal('<div class="progress" style="width:%spx">%s%s</div>' % (width, d_a, d_d))
1087 return literal('<div class="progress" style="width:%spx">%s%s</div>' % (width, d_a, d_d))
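A quick worked example of the clamping above: for a file with added=1 and deleted=99 (width 100), unit is 1.0, so a_p is bumped up to the 9% minimum while d_p stays at 99; their sum (108) exceeds the width, so the larger bar, d_p, is reduced by the overshoot of 8 down to 91, and the two bars again total exactly 100.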
1090
1088
1091
1089
1092 _URLIFY_RE = re.compile(r'''
1090 _URLIFY_RE = re.compile(r'''
1093 # URL markup
1091 # URL markup
1094 (?P<url>%s) |
1092 (?P<url>%s) |
1095 # @mention markup
1093 # @mention markup
1096 (?P<mention>%s) |
1094 (?P<mention>%s) |
1097 # Changeset hash markup
1095 # Changeset hash markup
1098 (?<!\w|[-_])
1096 (?<!\w|[-_])
1099 (?P<hash>[0-9a-f]{12,40})
1097 (?P<hash>[0-9a-f]{12,40})
1100 (?!\w|[-_]) |
1098 (?!\w|[-_]) |
1101 # Markup of *bold text*
1099 # Markup of *bold text*
1102 (?:
1100 (?:
1103 (?:^|(?<=\s))
1101 (?:^|(?<=\s))
1104 (?P<bold> [*] (?!\s) [^*\n]* (?<!\s) [*] )
1102 (?P<bold> [*] (?!\s) [^*\n]* (?<!\s) [*] )
1105 (?![*\w])
1103 (?![*\w])
1106 ) |
1104 ) |
1107 # "Stylize" markup
1105 # "Stylize" markup
1108 \[see\ \=&gt;\ *(?P<seen>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
1106 \[see\ \=&gt;\ *(?P<seen>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
1109 \[license\ \=&gt;\ *(?P<license>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
1107 \[license\ \=&gt;\ *(?P<license>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
1110 \[(?P<tagtype>requires|recommends|conflicts|base)\ \=&gt;\ *(?P<tagvalue>[a-zA-Z0-9\-\/]*)\] |
1108 \[(?P<tagtype>requires|recommends|conflicts|base)\ \=&gt;\ *(?P<tagvalue>[a-zA-Z0-9\-\/]*)\] |
1111 \[(?:lang|language)\ \=&gt;\ *(?P<lang>[a-zA-Z\-\/\#\+]*)\] |
1109 \[(?:lang|language)\ \=&gt;\ *(?P<lang>[a-zA-Z\-\/\#\+]*)\] |
1112 \[(?P<tag>[a-z]+)\]
1110 \[(?P<tag>[a-z]+)\]
1113 ''' % (url_re.pattern, MENTIONS_REGEX.pattern),
1111 ''' % (url_re.pattern, MENTIONS_REGEX.pattern),
1114 re.VERBOSE | re.MULTILINE | re.IGNORECASE)
1112 re.VERBOSE | re.MULTILINE | re.IGNORECASE)
1115
1113
1116
1114
1117 def urlify_text(s, repo_name=None, link_=None, truncate=None, stylize=False, truncatef=truncate):
1115 def urlify_text(s, repo_name=None, link_=None, truncate=None, stylize=False, truncatef=truncate):
1118 """
1116 """
1119 Parses the given text message and makes a literal html string with markup.
1117 Parses the given text message and makes a literal html string with markup.
1120 The text will be truncated to the specified length.
1118 The text will be truncated to the specified length.
1121 Hashes are turned into changeset links to the specified repository.
1119 Hashes are turned into changeset links to the specified repository.
1122 URLs link to what they say.
1120 URLs link to what they say.
1123 Issues are linked to the given issue server.
1121 Issues are linked to the given issue server.
1124 If link_ is provided, all text not already linking somewhere will link there.
1122 If link_ is provided, all text not already linking somewhere will link there.
1125 >>> urlify_text("Urlify http://example.com/ and 'https://example.com' *and* <b>markup/b>")
1123 >>> urlify_text("Urlify http://example.com/ and 'https://example.com' *and* <b>markup/b>")
1126 literal('Urlify <a href="http://example.com/">http://example.com/</a> and &#39;<a href="https://example.com&apos">https://example.com&apos</a>; <b>*and*</b> &lt;b&gt;markup/b&gt;')
1124 literal('Urlify <a href="http://example.com/">http://example.com/</a> and &#39;<a href="https://example.com&apos">https://example.com&apos</a>; <b>*and*</b> &lt;b&gt;markup/b&gt;')
1127 """
1125 """
1128
1126
1129 def _replace(match_obj):
1127 def _replace(match_obj):
1130 url = match_obj.group('url')
1128 url = match_obj.group('url')
1131 if url is not None:
1129 if url is not None:
1132 return '<a href="%(url)s">%(url)s</a>' % {'url': url}
1130 return '<a href="%(url)s">%(url)s</a>' % {'url': url}
1133 mention = match_obj.group('mention')
1131 mention = match_obj.group('mention')
1134 if mention is not None:
1132 if mention is not None:
1135 return '<b>%s</b>' % mention
1133 return '<b>%s</b>' % mention
1136 hash_ = match_obj.group('hash')
1134 hash_ = match_obj.group('hash')
1137 if hash_ is not None and repo_name is not None:
1135 if hash_ is not None and repo_name is not None:
1138 from kallithea.config.routing import url # doh, we need to re-import url to mock it later
1136 from kallithea.config.routing import url # doh, we need to re-import url to mock it later
1139 return '<a class="changeset_hash" href="%(url)s">%(hash)s</a>' % {
1137 return '<a class="changeset_hash" href="%(url)s">%(hash)s</a>' % {
1140 'url': url('changeset_home', repo_name=repo_name, revision=hash_),
1138 'url': url('changeset_home', repo_name=repo_name, revision=hash_),
1141 'hash': hash_,
1139 'hash': hash_,
1142 }
1140 }
1143 bold = match_obj.group('bold')
1141 bold = match_obj.group('bold')
1144 if bold is not None:
1142 if bold is not None:
1145 return '<b>*%s*</b>' % _urlify(bold[1:-1])
1143 return '<b>*%s*</b>' % _urlify(bold[1:-1])
1146 if stylize:
1144 if stylize:
1147 seen = match_obj.group('seen')
1145 seen = match_obj.group('seen')
1148 if seen:
1146 if seen:
1149 return '<div class="label label-meta" data-tag="see">see =&gt; %s</div>' % seen
1147 return '<div class="label label-meta" data-tag="see">see =&gt; %s</div>' % seen
1150 license = match_obj.group('license')
1148 license = match_obj.group('license')
1151 if license:
1149 if license:
1152 return '<div class="label label-meta" data-tag="license"><a href="http://www.opensource.org/licenses/%s">%s</a></div>' % (license, license)
1150 return '<div class="label label-meta" data-tag="license"><a href="http://www.opensource.org/licenses/%s">%s</a></div>' % (license, license)
1153 tagtype = match_obj.group('tagtype')
1151 tagtype = match_obj.group('tagtype')
1154 if tagtype:
1152 if tagtype:
1155 tagvalue = match_obj.group('tagvalue')
1153 tagvalue = match_obj.group('tagvalue')
1156 return '<div class="label label-meta" data-tag="%s">%s =&gt; <a href="/%s">%s</a></div>' % (tagtype, tagtype, tagvalue, tagvalue)
1154 return '<div class="label label-meta" data-tag="%s">%s =&gt; <a href="/%s">%s</a></div>' % (tagtype, tagtype, tagvalue, tagvalue)
1157 lang = match_obj.group('lang')
1155 lang = match_obj.group('lang')
1158 if lang:
1156 if lang:
1159 return '<div class="label label-meta" data-tag="lang">%s</div>' % lang
1157 return '<div class="label label-meta" data-tag="lang">%s</div>' % lang
1160 tag = match_obj.group('tag')
1158 tag = match_obj.group('tag')
1161 if tag:
1159 if tag:
1162 return '<div class="label label-meta" data-tag="%s">%s</div>' % (tag, tag)
1160 return '<div class="label label-meta" data-tag="%s">%s</div>' % (tag, tag)
1163 return match_obj.group(0)
1161 return match_obj.group(0)
1164
1162
1165 def _urlify(s):
1163 def _urlify(s):
1166 """
1164 """
1167 Extract urls from text and make html links out of them
1165 Extract urls from text and make html links out of them
1168 """
1166 """
1169 return _URLIFY_RE.sub(_replace, s)
1167 return _URLIFY_RE.sub(_replace, s)
1170
1168
1171 if truncate is None:
1169 if truncate is None:
1172 s = s.rstrip()
1170 s = s.rstrip()
1173 else:
1171 else:
1174 s = truncatef(s, truncate, whole_word=True)
1172 s = truncatef(s, truncate, whole_word=True)
1175 s = html_escape(s)
1173 s = html_escape(s)
1176 s = _urlify(s)
1174 s = _urlify(s)
1177 if repo_name is not None:
1175 if repo_name is not None:
1178 s = urlify_issues(s, repo_name)
1176 s = urlify_issues(s, repo_name)
1179 if link_ is not None:
1177 if link_ is not None:
1180 # make href around everything that isn't a href already
1178 # make href around everything that isn't a href already
1181 s = linkify_others(s, link_)
1179 s = linkify_others(s, link_)
1182 s = s.replace('\r\n', '<br/>').replace('\n', '<br/>')
1180 s = s.replace('\r\n', '<br/>').replace('\n', '<br/>')
1183 # Turn HTML5 into more valid HTML4 as required by some mail readers.
1181 # Turn HTML5 into more valid HTML4 as required by some mail readers.
1184 # (This is not done in one step in html_escape, because character codes like
1182 # (This is not done in one step in html_escape, because character codes like
1185 # &#123; risk to be seen as an issue reference due to the presence of '#'.)
1183 # &#123; risk to be seen as an issue reference due to the presence of '#'.)
1186 s = s.replace("&apos;", "&#39;")
1184 s = s.replace("&apos;", "&#39;")
1187 return literal(s)
1185 return literal(s)
1188
1186
1189
1187
1190 def linkify_others(t, l):
1188 def linkify_others(t, l):
1191 """Add a default link to html with links.
1189 """Add a default link to html with links.
1192 HTML doesn't allow nesting of links, so the outer link must be broken up
1190 HTML doesn't allow nesting of links, so the outer link must be broken up
1193 into pieces to leave room for the other links.
1191 into pieces to leave room for the other links.
1194 """
1192 """
1195 urls = re.compile(r'(\<a.*?\<\/a\>)',)
1193 urls = re.compile(r'(\<a.*?\<\/a\>)',)
1196 links = []
1194 links = []
1197 for e in urls.split(t):
1195 for e in urls.split(t):
1198 if e.strip() and not urls.match(e):
1196 if e.strip() and not urls.match(e):
1199 links.append('<a class="message-link" href="%s">%s</a>' % (l, e))
1197 links.append('<a class="message-link" href="%s">%s</a>' % (l, e))
1200 else:
1198 else:
1201 links.append(e)
1199 links.append(e)
1202
1200
1203 return ''.join(links)
1201 return ''.join(links)
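A tiny worked example of the splitting described in the docstring (the message text and link target are invented for illustration):

    # linkify_others('fixes <a href="/x">x</a> now', '/pr/7') returns the concatenation of
    #   '<a class="message-link" href="/pr/7">fixes </a>'
    #   '<a href="/x">x</a>'
    #   '<a class="message-link" href="/pr/7"> now</a>'
    # i.e. plain-text stretches get wrapped in the default link, existing links are kept as-is.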
1204
1202
1205
1203
1206 # Global variable that will hold the actual urlify_issues implementation.
1204 # Global variable that will hold the actual urlify_issues implementation.
1207 # Will be set on first use when the global configuration has been read.
1205 # Will be set on first use when the global configuration has been read.
1208 _urlify_issues_f = None
1206 _urlify_issues_f = None
1209
1207
1210
1208
1211 def urlify_issues(newtext, repo_name):
1209 def urlify_issues(newtext, repo_name):
1212 """Urlify issue references according to .ini configuration"""
1210 """Urlify issue references according to .ini configuration"""
1213 global _urlify_issues_f
1211 global _urlify_issues_f
1214 if _urlify_issues_f is None:
1212 if _urlify_issues_f is None:
1215 from kallithea import CONFIG
1216 from kallithea.model.db import URL_SEP
1213 from kallithea.model.db import URL_SEP
1217 assert CONFIG['sqlalchemy.url'] # make sure config has been loaded
1214 assert kallithea.CONFIG['sqlalchemy.url'] # make sure config has been loaded
1218
1215
1219 # Build chain of urlify functions, starting with not doing any transformation
1216 # Build chain of urlify functions, starting with not doing any transformation
1220 def tmp_urlify_issues_f(s):
1217 def tmp_urlify_issues_f(s):
1221 return s
1218 return s
1222
1219
1223 issue_pat_re = re.compile(r'issue_pat(.*)')
1220 issue_pat_re = re.compile(r'issue_pat(.*)')
1224 for k in CONFIG:
1221 for k in kallithea.CONFIG:
1225 # Find all issue_pat* settings that also have corresponding server_link and prefix configuration
1222 # Find all issue_pat* settings that also have corresponding server_link and prefix configuration
1226 m = issue_pat_re.match(k)
1223 m = issue_pat_re.match(k)
1227 if m is None:
1224 if m is None:
1228 continue
1225 continue
1229 suffix = m.group(1)
1226 suffix = m.group(1)
1230 issue_pat = CONFIG.get(k)
1227 issue_pat = kallithea.CONFIG.get(k)
1231 issue_server_link = CONFIG.get('issue_server_link%s' % suffix)
1228 issue_server_link = kallithea.CONFIG.get('issue_server_link%s' % suffix)
1232 issue_sub = CONFIG.get('issue_sub%s' % suffix)
1229 issue_sub = kallithea.CONFIG.get('issue_sub%s' % suffix)
1233 issue_prefix = CONFIG.get('issue_prefix%s' % suffix)
1230 issue_prefix = kallithea.CONFIG.get('issue_prefix%s' % suffix)
1234 if issue_prefix:
1231 if issue_prefix:
1235 log.error('found unsupported issue_prefix%s = %r - use issue_sub%s instead', suffix, issue_prefix, suffix)
1232 log.error('found unsupported issue_prefix%s = %r - use issue_sub%s instead', suffix, issue_prefix, suffix)
1236 if not issue_pat:
1233 if not issue_pat:
1237 log.error('skipping incomplete issue pattern %r: it needs a regexp', k)
1234 log.error('skipping incomplete issue pattern %r: it needs a regexp', k)
1238 continue
1235 continue
1239 if not issue_server_link:
1236 if not issue_server_link:
1240 log.error('skipping incomplete issue pattern %r: it needs issue_server_link%s', k, suffix)
1237 log.error('skipping incomplete issue pattern %r: it needs issue_server_link%s', k, suffix)
1241 continue
1238 continue
1242 if issue_sub is None: # issue_sub can be empty but should be present
1239 if issue_sub is None: # issue_sub can be empty but should be present
1243 log.error('skipping incomplete issue pattern %r: it needs (a potentially empty) issue_sub%s', k, suffix)
1240 log.error('skipping incomplete issue pattern %r: it needs (a potentially empty) issue_sub%s', k, suffix)
1244 continue
1241 continue
1245
1242
1246 # Wrap tmp_urlify_issues_f with substitution of this pattern, while making sure all loop variables (and compiled regexpes) are bound
1243 # Wrap tmp_urlify_issues_f with substitution of this pattern, while making sure all loop variables (and compiled regexpes) are bound
1247 try:
1244 try:
1248 issue_re = re.compile(issue_pat)
1245 issue_re = re.compile(issue_pat)
1249 except re.error as e:
1246 except re.error as e:
1250 log.error('skipping invalid issue pattern %r: %r -> %r %r. Error: %s', k, issue_pat, issue_server_link, issue_sub, str(e))
1247 log.error('skipping invalid issue pattern %r: %r -> %r %r. Error: %s', k, issue_pat, issue_server_link, issue_sub, str(e))
1251 continue
1248 continue
1252
1249
1253 log.debug('issue pattern %r: %r -> %r %r', k, issue_pat, issue_server_link, issue_sub)
1250 log.debug('issue pattern %r: %r -> %r %r', k, issue_pat, issue_server_link, issue_sub)
1254
1251
1255 def issues_replace(match_obj,
1252 def issues_replace(match_obj,
1256 issue_server_link=issue_server_link, issue_sub=issue_sub):
1253 issue_server_link=issue_server_link, issue_sub=issue_sub):
1257 try:
1254 try:
1258 issue_url = match_obj.expand(issue_server_link)
1255 issue_url = match_obj.expand(issue_server_link)
1259 except (IndexError, re.error) as e:
1256 except (IndexError, re.error) as e:
1260 log.error('invalid issue_url setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
1257 log.error('invalid issue_url setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
1261 issue_url = issue_server_link
1258 issue_url = issue_server_link
1262 issue_url = issue_url.replace('{repo}', repo_name)
1259 issue_url = issue_url.replace('{repo}', repo_name)
1263 issue_url = issue_url.replace('{repo_name}', repo_name.split(URL_SEP)[-1])
1260 issue_url = issue_url.replace('{repo_name}', repo_name.split(URL_SEP)[-1])
1264 # if issue_sub is empty use the matched issue reference verbatim
1261 # if issue_sub is empty use the matched issue reference verbatim
1265 if not issue_sub:
1262 if not issue_sub:
1266 issue_text = match_obj.group()
1263 issue_text = match_obj.group()
1267 else:
1264 else:
1268 try:
1265 try:
1269 issue_text = match_obj.expand(issue_sub)
1266 issue_text = match_obj.expand(issue_sub)
1270 except (IndexError, re.error) as e:
1267 except (IndexError, re.error) as e:
1271 log.error('invalid issue_sub setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
1268 log.error('invalid issue_sub setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
1272 issue_text = match_obj.group()
1269 issue_text = match_obj.group()
1273
1270
1274 return (
1271 return (
1275 '<a class="issue-tracker-link" href="%(url)s">'
1272 '<a class="issue-tracker-link" href="%(url)s">'
1276 '%(text)s'
1273 '%(text)s'
1277 '</a>'
1274 '</a>'
1278 ) % {
1275 ) % {
1279 'url': issue_url,
1276 'url': issue_url,
1280 'text': issue_text,
1277 'text': issue_text,
1281 }
1278 }
1282
1279
1283 def tmp_urlify_issues_f(s, issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f):
1280 def tmp_urlify_issues_f(s, issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f):
1284 return issue_re.sub(issues_replace, chain_f(s))
1281 return issue_re.sub(issues_replace, chain_f(s))
1285
1282
1286 # Set tmp function globally - atomically
1283 # Set tmp function globally - atomically
1287 _urlify_issues_f = tmp_urlify_issues_f
1284 _urlify_issues_f = tmp_urlify_issues_f
1288
1285
1289 return _urlify_issues_f(newtext)
1286 return _urlify_issues_f(newtext)
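For context on the loop above: the settings it consumes come in suffixed triplets. A hypothetical configuration (the keys follow the code, the values are examples only) could look like this in kallithea.CONFIG:

    #   'issue_pat': r'#(\d+)'
    #   'issue_server_link': r'https://issues.example.com/{repo}/issue/\1'
    #   'issue_sub': ''
    #   'issue_pat_wiki': r'wiki-(\w+)'
    #   'issue_server_link_wiki': r'https://wiki.example.com/\1'
    #   'issue_sub_wiki': r'WIKI:\1'

With an empty issue_sub the matched text (e.g. '#123') is kept verbatim as the link text; a non-empty issue_sub is expanded with the match's groups instead.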
1290
1287
1291
1288
1292 def render_w_mentions(source, repo_name=None):
1289 def render_w_mentions(source, repo_name=None):
1293 """
1290 """
1294 Render plain text with revision hashes and issue references urlified
1291 Render plain text with revision hashes and issue references urlified
1295 and with @mention highlighting.
1292 and with @mention highlighting.
1296 """
1293 """
1297 s = safe_str(source)
1294 s = safe_str(source)
1298 s = urlify_text(s, repo_name=repo_name)
1295 s = urlify_text(s, repo_name=repo_name)
1299 return literal('<div class="formatted-fixed">%s</div>' % s)
1296 return literal('<div class="formatted-fixed">%s</div>' % s)
1300
1297
1301
1298
1302 def short_ref(ref_type, ref_name):
1299 def short_ref(ref_type, ref_name):
1303 if ref_type == 'rev':
1300 if ref_type == 'rev':
1304 return short_id(ref_name)
1301 return short_id(ref_name)
1305 return ref_name
1302 return ref_name
1306
1303
1307
1304
1308 def link_to_ref(repo_name, ref_type, ref_name, rev=None):
1305 def link_to_ref(repo_name, ref_type, ref_name, rev=None):
1309 """
1306 """
1310 Return full markup for a href to changeset_home for a changeset.
1307 Return full markup for a href to changeset_home for a changeset.
1311 If ref_type is branch it will link to changelog.
1308 If ref_type is branch it will link to changelog.
1312 ref_name is shortened if ref_type is 'rev'.
1309 ref_name is shortened if ref_type is 'rev'.
1313 if rev is specified show it too, explicitly linking to that revision.
1310 if rev is specified show it too, explicitly linking to that revision.
1314 """
1311 """
1315 txt = short_ref(ref_type, ref_name)
1312 txt = short_ref(ref_type, ref_name)
1316 if ref_type == 'branch':
1313 if ref_type == 'branch':
1317 u = url('changelog_home', repo_name=repo_name, branch=ref_name)
1314 u = url('changelog_home', repo_name=repo_name, branch=ref_name)
1318 else:
1315 else:
1319 u = url('changeset_home', repo_name=repo_name, revision=ref_name)
1316 u = url('changeset_home', repo_name=repo_name, revision=ref_name)
1320 l = link_to(repo_name + '#' + txt, u)
1317 l = link_to(repo_name + '#' + txt, u)
1321 if rev and ref_type != 'rev':
1318 if rev and ref_type != 'rev':
1322 l = literal('%s (%s)' % (l, link_to(short_id(rev), url('changeset_home', repo_name=repo_name, revision=rev))))
1319 l = literal('%s (%s)' % (l, link_to(short_id(rev), url('changeset_home', repo_name=repo_name, revision=rev))))
1323 return l
1320 return l
1324
1321
1325
1322
1326 def changeset_status(repo, revision):
1323 def changeset_status(repo, revision):
1327 from kallithea.model.changeset_status import ChangesetStatusModel
1324 from kallithea.model.changeset_status import ChangesetStatusModel
1328 return ChangesetStatusModel().get_status(repo, revision)
1325 return ChangesetStatusModel().get_status(repo, revision)
1329
1326
1330
1327
1331 def changeset_status_lbl(changeset_status):
1328 def changeset_status_lbl(changeset_status):
1332 from kallithea.model.db import ChangesetStatus
1329 from kallithea.model.db import ChangesetStatus
1333 return ChangesetStatus.get_status_lbl(changeset_status)
1330 return ChangesetStatus.get_status_lbl(changeset_status)
1334
1331
1335
1332
1336 def get_permission_name(key):
1333 def get_permission_name(key):
1337 from kallithea.model.db import Permission
1334 from kallithea.model.db import Permission
1338 return dict(Permission.PERMS).get(key)
1335 return dict(Permission.PERMS).get(key)
1339
1336
1340
1337
1341 def journal_filter_help():
1338 def journal_filter_help():
1342 return _(textwrap.dedent('''
1339 return _(textwrap.dedent('''
1343 Example filter terms:
1340 Example filter terms:
1344 repository:vcs
1341 repository:vcs
1345 username:developer
1342 username:developer
1346 action:*push*
1343 action:*push*
1347 ip:127.0.0.1
1344 ip:127.0.0.1
1348 date:20120101
1345 date:20120101
1349 date:[20120101100000 TO 20120102]
1346 date:[20120101100000 TO 20120102]
1350
1347
1351 Generate wildcards using '*' character:
1348 Generate wildcards using '*' character:
1352 "repository:vcs*" - search everything starting with 'vcs'
1349 "repository:vcs*" - search everything starting with 'vcs'
1353 "repository:*vcs*" - search for repository containing 'vcs'
1350 "repository:*vcs*" - search for repository containing 'vcs'
1354
1351
1355 Optional AND / OR operators in queries
1352 Optional AND / OR operators in queries
1356 "repository:vcs OR repository:test"
1353 "repository:vcs OR repository:test"
1357 "username:test AND repository:test*"
1354 "username:test AND repository:test*"
1358 '''))
1355 '''))
1359
1356
1360
1357
1361 def not_mapped_error(repo_name):
1358 def not_mapped_error(repo_name):
1362 flash(_('%s repository is not mapped to db perhaps'
1359 flash(_('%s repository is not mapped to db perhaps'
1363 ' it was created or renamed from the filesystem'
1360 ' it was created or renamed from the filesystem'
1364 ' please run the application again'
1361 ' please run the application again'
1365 ' in order to rescan repositories') % repo_name, category='error')
1362 ' in order to rescan repositories') % repo_name, category='error')
1366
1363
1367
1364
1368 def ip_range(ip_addr):
1365 def ip_range(ip_addr):
1369 from kallithea.model.db import UserIpMap
1366 from kallithea.model.db import UserIpMap
1370 s, e = UserIpMap._get_ip_range(ip_addr)
1367 s, e = UserIpMap._get_ip_range(ip_addr)
1371 return '%s - %s' % (s, e)
1368 return '%s - %s' % (s, e)
1372
1369
1373
1370
1374 session_csrf_secret_name = "_session_csrf_secret_token"
1371 session_csrf_secret_name = "_session_csrf_secret_token"
1375
1372
1376 def session_csrf_secret_token():
1373 def session_csrf_secret_token():
1377 """Return (and create) the current session's CSRF protection token."""
1374 """Return (and create) the current session's CSRF protection token."""
1378 from tg import session
1375 from tg import session
1379 if not session_csrf_secret_name in session:
1376 if not session_csrf_secret_name in session:
1380 session[session_csrf_secret_name] = str(random.getrandbits(128))
1377 session[session_csrf_secret_name] = str(random.getrandbits(128))
1381 session.save()
1378 session.save()
1382 return session[session_csrf_secret_name]
1379 return session[session_csrf_secret_name]
1383
1380
1384 def form(url, method="post", **attrs):
1381 def form(url, method="post", **attrs):
1385 """Like webhelpers.html.tags.form , but automatically adding
1382 """Like webhelpers.html.tags.form , but automatically adding
1386 session_csrf_secret_token for POST. The secret is thus never leaked in GET
1383 session_csrf_secret_token for POST. The secret is thus never leaked in GET
1387 URLs.
1384 URLs.
1388 """
1385 """
1389 form = insecure_form(url, method, **attrs)
1386 form = insecure_form(url, method, **attrs)
1390 if method.lower() == 'get':
1387 if method.lower() == 'get':
1391 return form
1388 return form
1392 return form + HTML.div(hidden(session_csrf_secret_name, session_csrf_secret_token()), style="display: none;")
1389 return form + HTML.div(hidden(session_csrf_secret_name, session_csrf_secret_token()), style="display: none;")
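The recurring edit in this hunk, importing the module and reading CONFIG as an attribute instead of importing the name, comes down to binding time. A minimal, self-contained sketch (the module and key names below are made up for illustration, not Kallithea APIs):

    import sys
    import types

    cfg_mod = types.ModuleType('cfg_mod')
    cfg_mod.CONFIG = {}                 # empty at import time, like a not-yet-loaded config
    sys.modules['cfg_mod'] = cfg_mod    # make the stand-in importable for the demonstration

    from cfg_mod import CONFIG          # binds the *current* object once, at import time
    import cfg_mod as cfg               # binds the module; CONFIG is looked up on each use

    cfg_mod.CONFIG = {'key': 'loaded later'}   # config parsed later, or patched in a test

    print(CONFIG.get('key'))       # None - the from-import still sees the old, empty dict
    print(cfg.CONFIG.get('key'))   # 'loaded later' - attribute access sees the new dict

The same late lookup is what lets tests and mock.patch swap the value in after modules that use it have already been imported.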
@@ -1,188 +1,188 b''
1 import mock
1 import mock
2
2
3 from kallithea import CONFIG
3 import kallithea
4 from kallithea.config.conf import INDEX_FILENAMES
4 from kallithea.config.conf import INDEX_FILENAMES
5 from kallithea.model.meta import Session
5 from kallithea.model.meta import Session
6 from kallithea.model.repo import RepoModel
6 from kallithea.model.repo import RepoModel
7 from kallithea.model.repo_group import RepoGroupModel
7 from kallithea.model.repo_group import RepoGroupModel
8 from kallithea.tests import base
8 from kallithea.tests import base
9 from kallithea.tests.fixture import Fixture, create_test_index
9 from kallithea.tests.fixture import Fixture, create_test_index
10
10
11
11
12 fixture = Fixture()
12 fixture = Fixture()
13
13
14
14
15 def init_indexing_test(repo):
15 def init_indexing_test(repo):
16 prev = fixture.commit_change(repo.repo_name,
16 prev = fixture.commit_change(repo.repo_name,
17 filename='this_should_be_unique_filename.txt',
17 filename='this_should_be_unique_filename.txt',
18 content='this_should_be_unique_content\n',
18 content='this_should_be_unique_content\n',
19 message='this_should_be_unique_commit_log',
19 message='this_should_be_unique_commit_log',
20 vcs_type='hg',
20 vcs_type='hg',
21 newfile=True)
21 newfile=True)
22
22
23 def init_stopword_test(repo):
23 def init_stopword_test(repo):
24 prev = fixture.commit_change(repo.repo_name,
24 prev = fixture.commit_change(repo.repo_name,
25 filename='this/is/it',
25 filename='this/is/it',
26 content='def test\n',
26 content='def test\n',
27 message='bother to ask where - in folder',
27 message='bother to ask where - in folder',
28 vcs_type='hg',
28 vcs_type='hg',
29 newfile=True)
29 newfile=True)
30 prev = fixture.commit_change(repo.repo_name,
30 prev = fixture.commit_change(repo.repo_name,
31 filename='join.us',
31 filename='join.us',
32 content='def test\n',
32 content='def test\n',
33 message='bother to ask where - top level',
33 message='bother to ask where - top level',
34 author='this is it <this-is-it@foo.bar.com>',
34 author='this is it <this-is-it@foo.bar.com>',
35 vcs_type='hg',
35 vcs_type='hg',
36 parent=prev,
36 parent=prev,
37 newfile=True)
37 newfile=True)
38
38
39
39
40 repos = [
40 repos = [
41 # reponame, init func or fork base, groupname
41 # reponame, init func or fork base, groupname
42 ('indexing_test', init_indexing_test, None),
42 ('indexing_test', init_indexing_test, None),
43 ('indexing_test-fork', 'indexing_test', None),
43 ('indexing_test-fork', 'indexing_test', None),
44 ('group/indexing_test', 'indexing_test', 'group'),
44 ('group/indexing_test', 'indexing_test', 'group'),
45 ('this-is-it', 'indexing_test', None),
45 ('this-is-it', 'indexing_test', None),
46 ('indexing_test-foo', 'indexing_test', None),
46 ('indexing_test-foo', 'indexing_test', None),
47 ('stopword_test', init_stopword_test, None),
47 ('stopword_test', init_stopword_test, None),
48 ]
48 ]
49
49
50
50
51 # map: name => id
51 # map: name => id
52 repoids = {}
52 repoids = {}
53 groupids = {}
53 groupids = {}
54
54
55
55
56 def rebuild_index(full_index):
56 def rebuild_index(full_index):
57 with mock.patch('kallithea.lib.indexers.daemon.log.debug',
57 with mock.patch('kallithea.lib.indexers.daemon.log.debug',
58 lambda *args, **kwargs: None):
58 lambda *args, **kwargs: None):
59 # The more revisions managed repositories have, the more
59 # The more revisions managed repositories have, the more
60 # memory capturing "log.debug()" output in "indexers.daemon"
60 # memory capturing "log.debug()" output in "indexers.daemon"
61 # requires. This may cause unintentional failure of subsequent
61 # requires. This may cause unintentional failure of subsequent
62 # tests, if ENOMEM at forking "git" prevents from rebuilding
62 # tests, if ENOMEM at forking "git" prevents from rebuilding
63 # index for search.
63 # index for search.
64 # Therefore, "log.debug()" is disabled regardless of logging
64 # Therefore, "log.debug()" is disabled regardless of logging
65 # level while rebuilding index.
65 # level while rebuilding index.
66 # (FYI, ENOMEM occurs at forking "git" with python 2.7.3,
66 # (FYI, ENOMEM occurs at forking "git" with python 2.7.3,
67 # Linux 3.2.78-1 x86_64, 3GB memory, and no ulimit
67 # Linux 3.2.78-1 x86_64, 3GB memory, and no ulimit
68 # configuration for memory)
68 # configuration for memory)
69 create_test_index(base.TESTS_TMP_PATH, CONFIG, full_index=full_index)
69 create_test_index(base.TESTS_TMP_PATH, kallithea.CONFIG, full_index=full_index)
70
70
71
71
72 class TestSearchControllerIndexing(base.TestController):
72 class TestSearchControllerIndexing(base.TestController):
73 @classmethod
73 @classmethod
74 def setup_class(cls):
74 def setup_class(cls):
75 for reponame, init_or_fork, groupname in repos:
75 for reponame, init_or_fork, groupname in repos:
76 if groupname and groupname not in groupids:
76 if groupname and groupname not in groupids:
77 group = fixture.create_repo_group(groupname)
77 group = fixture.create_repo_group(groupname)
78 groupids[groupname] = group.group_id
78 groupids[groupname] = group.group_id
79 if callable(init_or_fork):
79 if callable(init_or_fork):
80 repo = fixture.create_repo(reponame,
80 repo = fixture.create_repo(reponame,
81 repo_group=groupname)
81 repo_group=groupname)
82 init_or_fork(repo)
82 init_or_fork(repo)
83 else:
83 else:
84 repo = fixture.create_fork(init_or_fork, reponame,
84 repo = fixture.create_fork(init_or_fork, reponame,
85 repo_group=groupname)
85 repo_group=groupname)
86 repoids[reponame] = repo.repo_id
86 repoids[reponame] = repo.repo_id
87
87
88 # treat "it" as indexable filename
88 # treat "it" as indexable filename
89 filenames_mock = list(INDEX_FILENAMES)
89 filenames_mock = list(INDEX_FILENAMES)
90 filenames_mock.append('it')
90 filenames_mock.append('it')
91 with mock.patch('kallithea.lib.indexers.daemon.INDEX_FILENAMES',
91 with mock.patch('kallithea.lib.indexers.daemon.INDEX_FILENAMES',
92 filenames_mock):
92 filenames_mock):
93 rebuild_index(full_index=False) # only for newly added repos
93 rebuild_index(full_index=False) # only for newly added repos
94
94
95 @classmethod
95 @classmethod
96 def teardown_class(cls):
96 def teardown_class(cls):
97 # delete in reversed order, to delete fork destination at first
97 # delete in reversed order, to delete fork destination at first
98 for reponame, init_or_fork, groupname in reversed(repos):
98 for reponame, init_or_fork, groupname in reversed(repos):
99 RepoModel().delete(repoids[reponame])
99 RepoModel().delete(repoids[reponame])
100
100
101 for reponame, init_or_fork, groupname in reversed(repos):
101 for reponame, init_or_fork, groupname in reversed(repos):
102 if groupname in groupids:
102 if groupname in groupids:
103 RepoGroupModel().delete(groupids.pop(groupname),
103 RepoGroupModel().delete(groupids.pop(groupname),
104 force_delete=True)
104 force_delete=True)
105
105
106 Session().commit()
106 Session().commit()
107 Session.remove()
107 Session.remove()
108
108
109 rebuild_index(full_index=True) # rebuild fully for subsequent tests
109 rebuild_index(full_index=True) # rebuild fully for subsequent tests
110
110
111 @base.parametrize('reponame', [
111 @base.parametrize('reponame', [
112 ('indexing_test'),
112 ('indexing_test'),
113 ('indexing_test-fork'),
113 ('indexing_test-fork'),
114 ('group/indexing_test'),
114 ('group/indexing_test'),
115 ('this-is-it'),
115 ('this-is-it'),
116 ('*-fork'),
116 ('*-fork'),
117 ('group/*'),
117 ('group/*'),
118 ])
118 ])
119 @base.parametrize('searchtype,query,hit', [
119 @base.parametrize('searchtype,query,hit', [
120 ('content', 'this_should_be_unique_content', 1),
120 ('content', 'this_should_be_unique_content', 1),
121 ('commit', 'this_should_be_unique_commit_log', 1),
121 ('commit', 'this_should_be_unique_commit_log', 1),
122 ('path', 'this_should_be_unique_filename.txt', 1),
122 ('path', 'this_should_be_unique_filename.txt', 1),
123 ])
123 ])
124 def test_repository_tokenization(self, reponame, searchtype, query, hit):
124 def test_repository_tokenization(self, reponame, searchtype, query, hit):
125 self.log_user()
125 self.log_user()
126
126
127 q = 'repository:%s %s' % (reponame, query)
127 q = 'repository:%s %s' % (reponame, query)
128 response = self.app.get(base.url(controller='search', action='index'),
128 response = self.app.get(base.url(controller='search', action='index'),
129 {'q': q, 'type': searchtype})
129 {'q': q, 'type': searchtype})
130 response.mustcontain('>%d results' % hit)
130 response.mustcontain('>%d results' % hit)
131
131
132 @base.parametrize('reponame', [
132 @base.parametrize('reponame', [
133 ('indexing_test'),
133 ('indexing_test'),
134 ('indexing_test-fork'),
134 ('indexing_test-fork'),
135 ('group/indexing_test'),
135 ('group/indexing_test'),
136 ('this-is-it'),
136 ('this-is-it'),
137 ])
137 ])
138 @base.parametrize('searchtype,query,hit', [
138 @base.parametrize('searchtype,query,hit', [
139 ('content', 'this_should_be_unique_content', 1),
139 ('content', 'this_should_be_unique_content', 1),
140 ('commit', 'this_should_be_unique_commit_log', 1),
140 ('commit', 'this_should_be_unique_commit_log', 1),
141 ('path', 'this_should_be_unique_filename.txt', 1),
141 ('path', 'this_should_be_unique_filename.txt', 1),
142 ])
142 ])
143 def test_searching_under_repository(self, reponame, searchtype, query, hit):
143 def test_searching_under_repository(self, reponame, searchtype, query, hit):
144 self.log_user()
144 self.log_user()
145
145
146 response = self.app.get(base.url(controller='search', action='index',
146 response = self.app.get(base.url(controller='search', action='index',
147 repo_name=reponame),
147 repo_name=reponame),
148 {'q': query, 'type': searchtype})
148 {'q': query, 'type': searchtype})
149 response.mustcontain('>%d results' % hit)
149 response.mustcontain('>%d results' % hit)
150
150
151 @base.parametrize('searchtype,query,hit', [
151 @base.parametrize('searchtype,query,hit', [
152 ('content', 'path:this/is/it def test', 1),
152 ('content', 'path:this/is/it def test', 1),
153 ('commit', 'added:this/is/it bother to ask where', 1),
153 ('commit', 'added:this/is/it bother to ask where', 1),
154 # this condition matches against files below, because
154 # this condition matches against files below, because
155 # "path:" condition is also applied on "repository path".
155 # "path:" condition is also applied on "repository path".
156 # - "this/is/it" in "stopword_test" repo
156 # - "this/is/it" in "stopword_test" repo
157 # - "this_should_be_unique_filename.txt" in "this-is-it" repo
157 # - "this_should_be_unique_filename.txt" in "this-is-it" repo
158 ('path', 'this/is/it', 2),
158 ('path', 'this/is/it', 2),
159
159
160 ('content', 'extension:us', 1),
160 ('content', 'extension:us', 1),
161 ('path', 'extension:us', 1),
161 ('path', 'extension:us', 1),
162 ])
162 ])
163 def test_filename_stopword(self, searchtype, query, hit):
163 def test_filename_stopword(self, searchtype, query, hit):
164 response = self.app.get(base.url(controller='search', action='index'),
164 response = self.app.get(base.url(controller='search', action='index'),
165 {'q': query, 'type': searchtype})
165 {'q': query, 'type': searchtype})
166
166
167 response.mustcontain('>%d results' % hit)
167 response.mustcontain('>%d results' % hit)
168
168
169 @base.parametrize('searchtype,query,hit', [
169 @base.parametrize('searchtype,query,hit', [
170 # matching against both 2 files
170 # matching against both 2 files
171 ('content', 'owner:"this is it"', 0),
171 ('content', 'owner:"this is it"', 0),
172 ('content', 'owner:this-is-it', 0),
172 ('content', 'owner:this-is-it', 0),
173 ('path', 'owner:"this is it"', 0),
173 ('path', 'owner:"this is it"', 0),
174 ('path', 'owner:this-is-it', 0),
174 ('path', 'owner:this-is-it', 0),
175
175
176 # matching against both 2 revisions
176 # matching against both 2 revisions
177 ('commit', 'owner:"this is it"', 0),
177 ('commit', 'owner:"this is it"', 0),
178 ('commit', 'owner:"this-is-it"', 0),
178 ('commit', 'owner:"this-is-it"', 0),
179
179
180 # matching against only 1 revision
180 # matching against only 1 revision
181 ('commit', 'author:"this is it"', 1),
181 ('commit', 'author:"this is it"', 1),
182 ('commit', 'author:"this-is-it"', 1),
182 ('commit', 'author:"this-is-it"', 1),
183 ])
183 ])
184 def test_mailaddr_stopword(self, searchtype, query, hit):
184 def test_mailaddr_stopword(self, searchtype, query, hit):
185 response = self.app.get(base.url(controller='search', action='index'),
185 response = self.app.get(base.url(controller='search', action='index'),
186 {'q': query, 'type': searchtype})
186 {'q': query, 'type': searchtype})
187
187
188 response.mustcontain('>%d results' % hit)
188 response.mustcontain('>%d results' % hit)
@@ -1,642 +1,642 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # This program is free software: you can redistribute it and/or modify
2 # This program is free software: you can redistribute it and/or modify
3 # it under the terms of the GNU General Public License as published by
3 # it under the terms of the GNU General Public License as published by
4 # the Free Software Foundation, either version 3 of the License, or
4 # the Free Software Foundation, either version 3 of the License, or
5 # (at your option) any later version.
5 # (at your option) any later version.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU General Public License
12 # You should have received a copy of the GNU General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 """
14 """
15 Test suite for vcs push/pull operations.
15 Test suite for vcs push/pull operations.
16
16
17 The tests need Git > 1.8.1.
17 The tests need Git > 1.8.1.
18
18
19 This file was forked by the Kallithea project in July 2014.
19 This file was forked by the Kallithea project in July 2014.
20 Original author and date, and relevant copyright and licensing information is below:
20 Original author and date, and relevant copyright and licensing information is below:
21 :created_on: Dec 30, 2010
21 :created_on: Dec 30, 2010
22 :author: marcink
22 :author: marcink
23 :copyright: (c) 2013 RhodeCode GmbH, and others.
23 :copyright: (c) 2013 RhodeCode GmbH, and others.
24 :license: GPLv3, see LICENSE.md for more details.
24 :license: GPLv3, see LICENSE.md for more details.
25
25
26 """
26 """
27
27
28 import json
28 import json
29 import os
29 import os
30 import re
30 import re
31 import tempfile
31 import tempfile
32 import time
32 import time
33 import urllib.request
33 import urllib.request
34 from subprocess import PIPE, Popen
34 from subprocess import PIPE, Popen
35 from tempfile import _RandomNameSequence
35 from tempfile import _RandomNameSequence
36
36
37 import pytest
37 import pytest
38
38
39 from kallithea import CONFIG
39 import kallithea
40 from kallithea.lib.utils2 import ascii_bytes, safe_str
40 from kallithea.lib.utils2 import ascii_bytes, safe_str
41 from kallithea.model.db import Repository, Ui, User, UserIpMap, UserLog
41 from kallithea.model.db import Repository, Ui, User, UserIpMap, UserLog
42 from kallithea.model.meta import Session
42 from kallithea.model.meta import Session
43 from kallithea.model.ssh_key import SshKeyModel
43 from kallithea.model.ssh_key import SshKeyModel
44 from kallithea.model.user import UserModel
44 from kallithea.model.user import UserModel
45 from kallithea.tests import base
45 from kallithea.tests import base
46 from kallithea.tests.fixture import Fixture
46 from kallithea.tests.fixture import Fixture
47
47
48
48
49 DEBUG = True
49 DEBUG = True
50 HOST = '127.0.0.1:4999' # test host
50 HOST = '127.0.0.1:4999' # test host
51
51
52 fixture = Fixture()
52 fixture = Fixture()
53
53
54
54
55 # Parameterize different kinds of VCS testing - both the kind of VCS and the
55 # Parameterize different kinds of VCS testing - both the kind of VCS and the
56 # access method (HTTP/SSH)
56 # access method (HTTP/SSH)
57
57
58 # Mixin for using HTTP and SSH URLs
58 # Mixin for using HTTP and SSH URLs
59 class HttpVcsTest(object):
59 class HttpVcsTest(object):
60 @staticmethod
60 @staticmethod
61 def repo_url_param(webserver, repo_name, **kwargs):
61 def repo_url_param(webserver, repo_name, **kwargs):
62 return webserver.repo_url(repo_name, **kwargs)
62 return webserver.repo_url(repo_name, **kwargs)
63
63
64 class SshVcsTest(object):
64 class SshVcsTest(object):
65 public_keys = {
65 public_keys = {
66 base.TEST_USER_REGULAR_LOGIN: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== kallithea@localhost',
66 base.TEST_USER_REGULAR_LOGIN: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== kallithea@localhost',
67 base.TEST_USER_ADMIN_LOGIN: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUq== kallithea@localhost',
67 base.TEST_USER_ADMIN_LOGIN: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUq== kallithea@localhost',
68 }
68 }
69
69
70 @classmethod
70 @classmethod
71 def repo_url_param(cls, webserver, repo_name, username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS, client_ip=base.IP_ADDR):
71 def repo_url_param(cls, webserver, repo_name, username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS, client_ip=base.IP_ADDR):
72 user = User.get_by_username(username)
72 user = User.get_by_username(username)
73 if user.ssh_keys:
73 if user.ssh_keys:
74 ssh_key = user.ssh_keys[0]
74 ssh_key = user.ssh_keys[0]
75 else:
75 else:
76 sshkeymodel = SshKeyModel()
76 sshkeymodel = SshKeyModel()
77 ssh_key = sshkeymodel.create(user, 'test key', cls.public_keys[user.username])
77 ssh_key = sshkeymodel.create(user, 'test key', cls.public_keys[user.username])
78 Session().commit()
78 Session().commit()
79
79
80 return cls._ssh_param(repo_name, user, ssh_key, client_ip)
80 return cls._ssh_param(repo_name, user, ssh_key, client_ip)
81
81
82 # Mixins for using Mercurial and Git
82 # Mixins for using Mercurial and Git
83 class HgVcsTest(object):
83 class HgVcsTest(object):
84 repo_type = 'hg'
84 repo_type = 'hg'
85 repo_name = base.HG_REPO
85 repo_name = base.HG_REPO
86
86
87 class GitVcsTest(object):
87 class GitVcsTest(object):
88 repo_type = 'git'
88 repo_type = 'git'
89 repo_name = base.GIT_REPO
89 repo_name = base.GIT_REPO
90
90
91 # Combine mixins to give the combinations we want to parameterize tests with
91 # Combine mixins to give the combinations we want to parameterize tests with
92 class HgHttpVcsTest(HgVcsTest, HttpVcsTest):
92 class HgHttpVcsTest(HgVcsTest, HttpVcsTest):
93 pass
93 pass
94
94
95 class GitHttpVcsTest(GitVcsTest, HttpVcsTest):
95 class GitHttpVcsTest(GitVcsTest, HttpVcsTest):
96 pass
96 pass
97
97
98 class HgSshVcsTest(HgVcsTest, SshVcsTest):
98 class HgSshVcsTest(HgVcsTest, SshVcsTest):
99 @staticmethod
99 @staticmethod
100 def _ssh_param(repo_name, user, ssh_key, client_ip):
100 def _ssh_param(repo_name, user, ssh_key, client_ip):
101 # Specify a custom ssh command on the command line
101 # Specify a custom ssh command on the command line
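# The ui.ssh wrapper avoids needing a real SSH daemon: it sets
# SSH_ORIGINAL_COMMAND to the command hg would run remotely and
# SSH_CONNECTION to a fake connection string carrying the client IP,
# then invokes kallithea-cli ssh-serve directly.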
102 return r"""--config ui.ssh="bash -c 'SSH_ORIGINAL_COMMAND=\"\$2\" SSH_CONNECTION=\"%s 1024 127.0.0.1 22\" kallithea-cli ssh-serve -c %s %s %s' --" ssh://someuser@somehost/%s""" % (
102 return r"""--config ui.ssh="bash -c 'SSH_ORIGINAL_COMMAND=\"\$2\" SSH_CONNECTION=\"%s 1024 127.0.0.1 22\" kallithea-cli ssh-serve -c %s %s %s' --" ssh://someuser@somehost/%s""" % (
103 client_ip,
103 client_ip,
104 CONFIG['__file__'],
104 kallithea.CONFIG['__file__'],
105 user.user_id,
105 user.user_id,
106 ssh_key.user_ssh_key_id,
106 ssh_key.user_ssh_key_id,
107 repo_name)
107 repo_name)
108
108
109 class GitSshVcsTest(GitVcsTest, SshVcsTest):
109 class GitSshVcsTest(GitVcsTest, SshVcsTest):
110 @staticmethod
110 @staticmethod
111 def _ssh_param(repo_name, user, ssh_key, client_ip):
111 def _ssh_param(repo_name, user, ssh_key, client_ip):
112 # Set a custom ssh command in the global environment
112 # Set a custom ssh command in the global environment
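# GIT_SSH_COMMAND makes git invoke this wrapper instead of ssh; note that it
# is left set in os.environ for subsequent git commands in the test process.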
113 os.environ['GIT_SSH_COMMAND'] = r"""bash -c 'SSH_ORIGINAL_COMMAND="$2" SSH_CONNECTION="%s 1024 127.0.0.1 22" kallithea-cli ssh-serve -c %s %s %s' --""" % (
113 os.environ['GIT_SSH_COMMAND'] = r"""bash -c 'SSH_ORIGINAL_COMMAND="$2" SSH_CONNECTION="%s 1024 127.0.0.1 22" kallithea-cli ssh-serve -c %s %s %s' --""" % (
114 client_ip,
114 client_ip,
115 CONFIG['__file__'],
115 kallithea.CONFIG['__file__'],
116 user.user_id,
116 user.user_id,
117 ssh_key.user_ssh_key_id)
117 ssh_key.user_ssh_key_id)
return "ssh://someuser@somehost/%s" % repo_name
119
119
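# Each decorated test runs once per combination class listed below; the
# class is passed to the test as the 'vt' argument.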
120 parametrize_vcs_test = base.parametrize('vt', [
120 parametrize_vcs_test = base.parametrize('vt', [
121 HgHttpVcsTest,
121 HgHttpVcsTest,
122 GitHttpVcsTest,
122 GitHttpVcsTest,
123 HgSshVcsTest,
123 HgSshVcsTest,
124 GitSshVcsTest,
124 GitSshVcsTest,
125 ])
125 ])
126 parametrize_vcs_test_hg = base.parametrize('vt', [
126 parametrize_vcs_test_hg = base.parametrize('vt', [
127 HgHttpVcsTest,
127 HgHttpVcsTest,
128 HgSshVcsTest,
128 HgSshVcsTest,
129 ])
129 ])
130 parametrize_vcs_test_http = base.parametrize('vt', [
130 parametrize_vcs_test_http = base.parametrize('vt', [
131 HgHttpVcsTest,
131 HgHttpVcsTest,
132 GitHttpVcsTest,
132 GitHttpVcsTest,
133 ])
133 ])
134
134
135 class Command(object):
135 class Command(object):
136
136
137 def __init__(self, cwd):
137 def __init__(self, cwd):
138 self.cwd = cwd
138 self.cwd = cwd
139
139
140 def execute(self, *args, **environ):
140 def execute(self, *args, **environ):
141 """
141 """
Run a command on the system, built by space-joining ``args`` without any
quoting. Extra keyword arguments are passed as environment variables,
except ``ignoreReturnCode`` which, when true, skips the exit code assertion.
Returns a (stdout, stderr) pair of strings.
144 """
144 """
145 command = ' '.join(args)
145 command = ' '.join(args)
146 ignoreReturnCode = environ.pop('ignoreReturnCode', False)
146 ignoreReturnCode = environ.pop('ignoreReturnCode', False)
147 if DEBUG:
147 if DEBUG:
148 print('*** CMD %s ***' % command)
148 print('*** CMD %s ***' % command)
149 testenv = dict(os.environ)
149 testenv = dict(os.environ)
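# Pin the language and Mercurial configuration so command output stays
# predictable across machines (HGRCPATH='' makes hg ignore user hgrc files).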
150 testenv['LANG'] = 'en_US.UTF-8'
150 testenv['LANG'] = 'en_US.UTF-8'
151 testenv['LANGUAGE'] = 'en_US:en'
151 testenv['LANGUAGE'] = 'en_US:en'
152 testenv['HGPLAIN'] = ''
152 testenv['HGPLAIN'] = ''
153 testenv['HGRCPATH'] = ''
153 testenv['HGRCPATH'] = ''
154 testenv.update(environ)
154 testenv.update(environ)
155 p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd, env=testenv)
155 p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd, env=testenv)
156 stdout, stderr = p.communicate()
156 stdout, stderr = p.communicate()
157 if DEBUG:
157 if DEBUG:
158 if stdout:
158 if stdout:
159 print('stdout:', stdout)
159 print('stdout:', stdout)
160 if stderr:
160 if stderr:
161 print('stderr:', stderr)
161 print('stderr:', stderr)
162 if not ignoreReturnCode:
162 if not ignoreReturnCode:
163 assert p.returncode == 0
163 assert p.returncode == 0
164 return safe_str(stdout), safe_str(stderr)
164 return safe_str(stdout), safe_str(stderr)
165
165
166
166
167 def _get_tmp_dir(prefix='vcs_operations-', suffix=''):
167 def _get_tmp_dir(prefix='vcs_operations-', suffix=''):
168 return tempfile.mkdtemp(dir=base.TESTS_TMP_PATH, prefix=prefix, suffix=suffix)
168 return tempfile.mkdtemp(dir=base.TESTS_TMP_PATH, prefix=prefix, suffix=suffix)
169
169
170
170
171 def _add_files(vcs, dest_dir, files_no=3):
171 def _add_files(vcs, dest_dir, files_no=3):
172 """
172 """
Create a file in the dest_dir working copy and commit ``files_no`` changes
to it with the given VCS.

:param vcs: 'git' or 'hg' - selects which VCS commands are run
:param dest_dir: path to an existing local clone / working directory
:param files_no: number of commits to create
178 """
178 """
179 added_file = '%ssetup.py' % next(_RandomNameSequence())
179 added_file = '%ssetup.py' % next(_RandomNameSequence())
180 open(os.path.join(dest_dir, added_file), 'a').close()
180 open(os.path.join(dest_dir, added_file), 'a').close()
181 Command(dest_dir).execute(vcs, 'add', added_file)
181 Command(dest_dir).execute(vcs, 'add', added_file)
182
182
183 email = 'me@example.com'
183 email = 'me@example.com'
184 if os.name == 'nt':
184 if os.name == 'nt':
185 author_str = 'User <%s>' % email
185 author_str = 'User <%s>' % email
186 else:
186 else:
187 author_str = 'User ǝɯɐᴎ <%s>' % email
187 author_str = 'User ǝɯɐᴎ <%s>' % email
188 for i in range(files_no):
188 for i in range(files_no):
189 cmd = """echo "added_line%s" >> %s""" % (i, added_file)
189 cmd = """echo "added_line%s" >> %s""" % (i, added_file)
190 Command(dest_dir).execute(cmd)
190 Command(dest_dir).execute(cmd)
191 if vcs == 'hg':
191 if vcs == 'hg':
192 cmd = """hg commit -m "committed new %s" -u "%s" "%s" """ % (
192 cmd = """hg commit -m "committed new %s" -u "%s" "%s" """ % (
193 i, author_str, added_file
193 i, author_str, added_file
194 )
194 )
195 elif vcs == 'git':
195 elif vcs == 'git':
196 cmd = """git commit -m "committed new %s" --author "%s" "%s" """ % (
196 cmd = """git commit -m "committed new %s" --author "%s" "%s" """ % (
197 i, author_str, added_file
197 i, author_str, added_file
198 )
198 )
199 # git commit needs EMAIL on some machines
199 # git commit needs EMAIL on some machines
200 Command(dest_dir).execute(cmd, EMAIL=email)
200 Command(dest_dir).execute(cmd, EMAIL=email)
201
201
202 def _add_files_and_push(webserver, vt, dest_dir, clone_url, ignoreReturnCode=False, files_no=3):
202 def _add_files_and_push(webserver, vt, dest_dir, clone_url, ignoreReturnCode=False, files_no=3):
203 _add_files(vt.repo_type, dest_dir, files_no=files_no)
203 _add_files(vt.repo_type, dest_dir, files_no=files_no)
204 # PUSH it back
204 # PUSH it back
205 stdout = stderr = None
205 stdout = stderr = None
206 if vt.repo_type == 'hg':
206 if vt.repo_type == 'hg':
207 stdout, stderr = Command(dest_dir).execute('hg push -f --verbose', clone_url, ignoreReturnCode=ignoreReturnCode)
207 stdout, stderr = Command(dest_dir).execute('hg push -f --verbose', clone_url, ignoreReturnCode=ignoreReturnCode)
208 elif vt.repo_type == 'git':
208 elif vt.repo_type == 'git':
209 stdout, stderr = Command(dest_dir).execute('git push -f --verbose', clone_url, "master", ignoreReturnCode=ignoreReturnCode)
209 stdout, stderr = Command(dest_dir).execute('git push -f --verbose', clone_url, "master", ignoreReturnCode=ignoreReturnCode)
210
210
211 return stdout, stderr
211 return stdout, stderr
212
212
213
213
214 def _check_outgoing(vcs, cwd, clone_url):
214 def _check_outgoing(vcs, cwd, clone_url):
215 if vcs == 'hg':
215 if vcs == 'hg':
216 # hg removes the password from default URLs, so we have to provide it here via the clone_url
216 # hg removes the password from default URLs, so we have to provide it here via the clone_url
217 return Command(cwd).execute('hg -q outgoing', clone_url, ignoreReturnCode=True)
217 return Command(cwd).execute('hg -q outgoing', clone_url, ignoreReturnCode=True)
218 elif vcs == 'git':
218 elif vcs == 'git':
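# For git, refresh the remote tracking branches and list local commits on
# master that have not reached origin/master.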
219 Command(cwd).execute('git remote update')
219 Command(cwd).execute('git remote update')
220 return Command(cwd).execute('git log origin/master..master')
220 return Command(cwd).execute('git log origin/master..master')
221
221
222
222
223 def set_anonymous_access(enable=True):
223 def set_anonymous_access(enable=True):
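# Anonymous access is tied to the built-in 'default' user: it is switched
# on or off by toggling that user's 'active' flag.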
224 user = User.get_default_user()
224 user = User.get_default_user()
225 user.active = enable
225 user.active = enable
226 Session().commit()
226 Session().commit()
227 if enable != User.get_default_user().active:
227 if enable != User.get_default_user().active:
228 raise Exception('Cannot set anonymous access')
228 raise Exception('Cannot set anonymous access')
229
229
230
230
231 #==============================================================================
231 #==============================================================================
232 # TESTS
232 # TESTS
233 #==============================================================================
233 #==============================================================================
234
234
235
235
236 def _check_proper_git_push(stdout, stderr):
236 def _check_proper_git_push(stdout, stderr):
237 assert 'fatal' not in stderr
237 assert 'fatal' not in stderr
238 assert 'rejected' not in stderr
238 assert 'rejected' not in stderr
239 assert 'Pushing to' in stderr
239 assert 'Pushing to' in stderr
240 assert 'master -> master' in stderr
240 assert 'master -> master' in stderr
241
241
242
242
243 @pytest.mark.usefixtures("test_context_fixture")
243 @pytest.mark.usefixtures("test_context_fixture")
244 class TestVCSOperations(base.TestController):
244 class TestVCSOperations(base.TestController):
245
245
246 @classmethod
246 @classmethod
247 def setup_class(cls):
247 def setup_class(cls):
248 # DISABLE ANONYMOUS ACCESS
248 # DISABLE ANONYMOUS ACCESS
249 set_anonymous_access(False)
249 set_anonymous_access(False)
250
250
251 @pytest.fixture()
251 @pytest.fixture()
252 def testhook_cleanup(self):
252 def testhook_cleanup(self):
253 yield
253 yield
# remove any testhook entries the tests may have configured
255 for hook in ['prechangegroup', 'pretxnchangegroup', 'preoutgoing', 'changegroup', 'outgoing', 'incoming']:
255 for hook in ['prechangegroup', 'pretxnchangegroup', 'preoutgoing', 'changegroup', 'outgoing', 'incoming']:
256 entry = Ui.get_by_key('hooks', '%s.testhook' % hook)
256 entry = Ui.get_by_key('hooks', '%s.testhook' % hook)
257 if entry:
257 if entry:
258 Session().delete(entry)
258 Session().delete(entry)
259 Session().commit()
259 Session().commit()
260
260
261 @pytest.fixture(scope="module")
261 @pytest.fixture(scope="module")
262 def testfork(self):
262 def testfork(self):
263 # create fork so the repo stays untouched
263 # create fork so the repo stays untouched
264 git_fork_name = '%s_fork%s' % (base.GIT_REPO, next(_RandomNameSequence()))
264 git_fork_name = '%s_fork%s' % (base.GIT_REPO, next(_RandomNameSequence()))
265 fixture.create_fork(base.GIT_REPO, git_fork_name)
265 fixture.create_fork(base.GIT_REPO, git_fork_name)
266 hg_fork_name = '%s_fork%s' % (base.HG_REPO, next(_RandomNameSequence()))
266 hg_fork_name = '%s_fork%s' % (base.HG_REPO, next(_RandomNameSequence()))
267 fixture.create_fork(base.HG_REPO, hg_fork_name)
267 fixture.create_fork(base.HG_REPO, hg_fork_name)
268 return {'git': git_fork_name, 'hg': hg_fork_name}
268 return {'git': git_fork_name, 'hg': hg_fork_name}
269
269
270 @parametrize_vcs_test
270 @parametrize_vcs_test
271 def test_clone_repo_by_admin(self, webserver, vt):
271 def test_clone_repo_by_admin(self, webserver, vt):
272 clone_url = vt.repo_url_param(webserver, vt.repo_name)
272 clone_url = vt.repo_url_param(webserver, vt.repo_name)
273 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir())
273 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir())
274
274
275 if vt.repo_type == 'git':
275 if vt.repo_type == 'git':
276 assert 'Cloning into' in stdout + stderr
276 assert 'Cloning into' in stdout + stderr
277 assert stderr == '' or stdout == ''
277 assert stderr == '' or stdout == ''
278 elif vt.repo_type == 'hg':
278 elif vt.repo_type == 'hg':
279 assert 'requesting all changes' in stdout
279 assert 'requesting all changes' in stdout
280 assert 'adding changesets' in stdout
280 assert 'adding changesets' in stdout
281 assert 'adding manifests' in stdout
281 assert 'adding manifests' in stdout
282 assert 'adding file changes' in stdout
282 assert 'adding file changes' in stdout
283 assert stderr == ''
283 assert stderr == ''
284
284
285 @parametrize_vcs_test_http
285 @parametrize_vcs_test_http
286 def test_clone_wrong_credentials(self, webserver, vt):
286 def test_clone_wrong_credentials(self, webserver, vt):
287 clone_url = vt.repo_url_param(webserver, vt.repo_name, password='bad!')
287 clone_url = vt.repo_url_param(webserver, vt.repo_name, password='bad!')
288 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
288 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
289 if vt.repo_type == 'git':
289 if vt.repo_type == 'git':
290 assert 'fatal: Authentication failed' in stderr
290 assert 'fatal: Authentication failed' in stderr
291 elif vt.repo_type == 'hg':
291 elif vt.repo_type == 'hg':
292 assert 'abort: authorization failed' in stderr
292 assert 'abort: authorization failed' in stderr
293
293
294 def test_clone_git_dir_as_hg(self, webserver):
294 def test_clone_git_dir_as_hg(self, webserver):
295 clone_url = HgHttpVcsTest.repo_url_param(webserver, base.GIT_REPO)
295 clone_url = HgHttpVcsTest.repo_url_param(webserver, base.GIT_REPO)
296 stdout, stderr = Command(base.TESTS_TMP_PATH).execute('hg clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
296 stdout, stderr = Command(base.TESTS_TMP_PATH).execute('hg clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
297 assert 'HTTP Error 404: Not Found' in stderr or "not a valid repository" in stdout and 'abort:' in stderr
297 assert 'HTTP Error 404: Not Found' in stderr or "not a valid repository" in stdout and 'abort:' in stderr
298
298
299 def test_clone_hg_repo_as_git(self, webserver):
299 def test_clone_hg_repo_as_git(self, webserver):
300 clone_url = GitHttpVcsTest.repo_url_param(webserver, base.HG_REPO)
300 clone_url = GitHttpVcsTest.repo_url_param(webserver, base.HG_REPO)
301 stdout, stderr = Command(base.TESTS_TMP_PATH).execute('git clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
301 stdout, stderr = Command(base.TESTS_TMP_PATH).execute('git clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
302 assert 'not found' in stderr
302 assert 'not found' in stderr
303
303
304 @parametrize_vcs_test
304 @parametrize_vcs_test
305 def test_clone_non_existing_path(self, webserver, vt):
305 def test_clone_non_existing_path(self, webserver, vt):
306 clone_url = vt.repo_url_param(webserver, 'trololo')
306 clone_url = vt.repo_url_param(webserver, 'trololo')
307 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
307 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
308 if vt.repo_type == 'git':
308 if vt.repo_type == 'git':
309 assert 'not found' in stderr or 'abort: Access to %r denied' % 'trololo' in stderr
309 assert 'not found' in stderr or 'abort: Access to %r denied' % 'trololo' in stderr
310 elif vt.repo_type == 'hg':
310 elif vt.repo_type == 'hg':
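# 'and' binds tighter than 'or': accept either a plain HTTP 404, or (for SSH)
# the abort message together with the remote 'Access ... denied' message.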
311 assert 'HTTP Error 404: Not Found' in stderr or 'abort: no suitable response from remote hg' in stderr and 'remote: abort: Access to %r denied' % 'trololo' in stdout + stderr
311 assert 'HTTP Error 404: Not Found' in stderr or 'abort: no suitable response from remote hg' in stderr and 'remote: abort: Access to %r denied' % 'trololo' in stdout + stderr
312
312
313 @parametrize_vcs_test
313 @parametrize_vcs_test
314 def test_push_new_repo(self, webserver, vt):
314 def test_push_new_repo(self, webserver, vt):
315 # Clear the log so we know what is added
315 # Clear the log so we know what is added
316 UserLog.query().delete()
316 UserLog.query().delete()
317 Session().commit()
317 Session().commit()
318
318
319 # Create an empty server repo using the API
319 # Create an empty server repo using the API
320 repo_name = 'new_%s_%s' % (vt.repo_type, next(_RandomNameSequence()))
320 repo_name = 'new_%s_%s' % (vt.repo_type, next(_RandomNameSequence()))
321 usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
321 usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
322 params = {
322 params = {
323 "id": 7,
323 "id": 7,
324 "api_key": usr.api_key,
324 "api_key": usr.api_key,
325 "method": 'create_repo',
325 "method": 'create_repo',
326 "args": dict(repo_name=repo_name,
326 "args": dict(repo_name=repo_name,
327 owner=base.TEST_USER_ADMIN_LOGIN,
327 owner=base.TEST_USER_ADMIN_LOGIN,
328 repo_type=vt.repo_type),
328 repo_type=vt.repo_type),
329 }
329 }
330 req = urllib.request.Request(
330 req = urllib.request.Request(
331 'http://%s:%s/_admin/api' % webserver.server_address,
331 'http://%s:%s/_admin/api' % webserver.server_address,
332 data=ascii_bytes(json.dumps(params)),
332 data=ascii_bytes(json.dumps(params)),
333 headers={'content-type': 'application/json'})
333 headers={'content-type': 'application/json'})
334 response = urllib.request.urlopen(req)
334 response = urllib.request.urlopen(req)
335 result = json.loads(response.read())
335 result = json.loads(response.read())
336 # Expect something like:
336 # Expect something like:
337 # {u'result': {u'msg': u'Created new repository `new_XXX`', u'task': None, u'success': True}, u'id': 7, u'error': None}
337 # {u'result': {u'msg': u'Created new repository `new_XXX`', u'task': None, u'success': True}, u'id': 7, u'error': None}
338 assert result['result']['success']
338 assert result['result']['success']
339
339
340 # Create local clone of the empty server repo
340 # Create local clone of the empty server repo
341 local_clone_dir = _get_tmp_dir()
341 local_clone_dir = _get_tmp_dir()
342 clone_url = vt.repo_url_param(webserver, repo_name)
342 clone_url = vt.repo_url_param(webserver, repo_name)
343 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, local_clone_dir)
343 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, local_clone_dir)
344
344
345 # Make 3 commits and push to the empty server repo.
345 # Make 3 commits and push to the empty server repo.
346 # The server repo doesn't have any other heads than the
346 # The server repo doesn't have any other heads than the
347 # refs/heads/master we are pushing, but the `git log` in the push hook
347 # refs/heads/master we are pushing, but the `git log` in the push hook
348 # should still list the 3 commits.
348 # should still list the 3 commits.
349 stdout, stderr = _add_files_and_push(webserver, vt, local_clone_dir, clone_url=clone_url)
349 stdout, stderr = _add_files_and_push(webserver, vt, local_clone_dir, clone_url=clone_url)
350 if vt.repo_type == 'git':
350 if vt.repo_type == 'git':
351 _check_proper_git_push(stdout, stderr)
351 _check_proper_git_push(stdout, stderr)
352 elif vt.repo_type == 'hg':
352 elif vt.repo_type == 'hg':
353 assert 'pushing to ' in stdout
353 assert 'pushing to ' in stdout
354 assert 'remote: added ' in stdout
354 assert 'remote: added ' in stdout
355
355
356 # Verify that we got the right events in UserLog. Expect something like:
356 # Verify that we got the right events in UserLog. Expect something like:
357 # <UserLog('id:new_git_XXX:started_following_repo')>
357 # <UserLog('id:new_git_XXX:started_following_repo')>
358 # <UserLog('id:new_git_XXX:user_created_repo')>
358 # <UserLog('id:new_git_XXX:user_created_repo')>
359 # <UserLog('id:new_git_XXX:pull')>
359 # <UserLog('id:new_git_XXX:pull')>
360 # <UserLog('id:new_git_XXX:push:aed9d4c1732a1927da3be42c47eb9afdc200d427,d38b083a07af10a9f44193486959a96a23db78da,4841ff9a2b385bec995f4679ef649adb3f437622')>
360 # <UserLog('id:new_git_XXX:push:aed9d4c1732a1927da3be42c47eb9afdc200d427,d38b083a07af10a9f44193486959a96a23db78da,4841ff9a2b385bec995f4679ef649adb3f437622')>
361 Session.close() # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
361 Session.close() # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
362 action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
362 action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
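# Each UserLog action looks like 'push:hash1,hash2,...'; map every entry to
# (action, number of changeset hashes) so the size of the push can be checked.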
363 assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == ([
363 assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == ([
364 ('started_following_repo', 0),
364 ('started_following_repo', 0),
365 ('user_created_repo', 0),
365 ('user_created_repo', 0),
366 ('pull', 0),
366 ('pull', 0),
367 ('push', 3)]
367 ('push', 3)]
368 if vt.repo_type == 'git' else [
368 if vt.repo_type == 'git' else [
369 ('started_following_repo', 0),
369 ('started_following_repo', 0),
370 ('user_created_repo', 0),
370 ('user_created_repo', 0),
371 # (u'pull', 0), # Mercurial outgoing hook is not called for empty clones
371 # (u'pull', 0), # Mercurial outgoing hook is not called for empty clones
372 ('push', 3)])
372 ('push', 3)])
373
373
374 @parametrize_vcs_test
374 @parametrize_vcs_test
375 def test_push_new_file(self, webserver, testfork, vt):
375 def test_push_new_file(self, webserver, testfork, vt):
376 UserLog.query().delete()
376 UserLog.query().delete()
377 Session().commit()
377 Session().commit()
378
378
379 dest_dir = _get_tmp_dir()
379 dest_dir = _get_tmp_dir()
380 clone_url = vt.repo_url_param(webserver, vt.repo_name)
380 clone_url = vt.repo_url_param(webserver, vt.repo_name)
381 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
381 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
382
382
383 clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type])
383 clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type])
384 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, clone_url=clone_url)
384 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, clone_url=clone_url)
385
385
386 if vt.repo_type == 'git':
386 if vt.repo_type == 'git':
387 _check_proper_git_push(stdout, stderr)
387 _check_proper_git_push(stdout, stderr)
388 elif vt.repo_type == 'hg':
388 elif vt.repo_type == 'hg':
389 assert 'pushing to' in stdout
389 assert 'pushing to' in stdout
390 assert 'Repository size' in stdout
390 assert 'Repository size' in stdout
391 assert 'Last revision is now' in stdout
391 assert 'Last revision is now' in stdout
392
392
393 Session.close() # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
393 Session.close() # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
394 action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
394 action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
395 assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \
395 assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \
396 [('pull', 0), ('push', 3)]
396 [('pull', 0), ('push', 3)]
397
397
398 @parametrize_vcs_test
398 @parametrize_vcs_test
399 def test_pull(self, webserver, testfork, vt):
399 def test_pull(self, webserver, testfork, vt):
400 UserLog.query().delete()
400 UserLog.query().delete()
401 Session().commit()
401 Session().commit()
402
402
403 dest_dir = _get_tmp_dir()
403 dest_dir = _get_tmp_dir()
404 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'init', dest_dir)
404 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'init', dest_dir)
405
405
406 clone_url = vt.repo_url_param(webserver, vt.repo_name)
406 clone_url = vt.repo_url_param(webserver, vt.repo_name)
407 stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url)
407 stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url)
408 Session.close() # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
408 Session.close() # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
409
409
410 if vt.repo_type == 'git':
410 if vt.repo_type == 'git':
411 assert 'FETCH_HEAD' in stderr
411 assert 'FETCH_HEAD' in stderr
412 elif vt.repo_type == 'hg':
412 elif vt.repo_type == 'hg':
413 assert 'new changesets' in stdout
413 assert 'new changesets' in stdout
414
414
415 action_parts = [ul.action for ul in UserLog.query().order_by(UserLog.user_log_id)]
415 action_parts = [ul.action for ul in UserLog.query().order_by(UserLog.user_log_id)]
416 assert action_parts == ['pull']
416 assert action_parts == ['pull']
417
417
418 # Test handling of URLs with extra '/' around repo_name
418 # Test handling of URLs with extra '/' around repo_name
419 stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url.replace('/' + vt.repo_name, '/./%s/' % vt.repo_name), ignoreReturnCode=True)
419 stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url.replace('/' + vt.repo_name, '/./%s/' % vt.repo_name), ignoreReturnCode=True)
420 if issubclass(vt, HttpVcsTest):
420 if issubclass(vt, HttpVcsTest):
421 if vt.repo_type == 'git':
421 if vt.repo_type == 'git':
422 # NOTE: when pulling from http://hostname/./vcs_test_git/ , the git client will normalize that and issue an HTTP request to /vcs_test_git/info/refs
422 # NOTE: when pulling from http://hostname/./vcs_test_git/ , the git client will normalize that and issue an HTTP request to /vcs_test_git/info/refs
423 assert 'Already up to date.' in stdout
423 assert 'Already up to date.' in stdout
424 else:
424 else:
425 assert vt.repo_type == 'hg'
425 assert vt.repo_type == 'hg'
426 assert "abort: HTTP Error 404: Not Found" in stderr
426 assert "abort: HTTP Error 404: Not Found" in stderr
427 else:
427 else:
428 assert issubclass(vt, SshVcsTest)
428 assert issubclass(vt, SshVcsTest)
429 if vt.repo_type == 'git':
429 if vt.repo_type == 'git':
430 assert "abort: Access to './%s' denied" % vt.repo_name in stderr
430 assert "abort: Access to './%s' denied" % vt.repo_name in stderr
431 else:
431 else:
432 assert "abort: Access to './%s' denied" % vt.repo_name in stdout + stderr
432 assert "abort: Access to './%s' denied" % vt.repo_name in stdout + stderr
433
433
434 stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url.replace('/' + vt.repo_name, '/%s/' % vt.repo_name), ignoreReturnCode=True)
434 stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url.replace('/' + vt.repo_name, '/%s/' % vt.repo_name), ignoreReturnCode=True)
435 if vt.repo_type == 'git':
435 if vt.repo_type == 'git':
436 assert 'Already up to date.' in stdout
436 assert 'Already up to date.' in stdout
437 else:
437 else:
438 assert vt.repo_type == 'hg'
438 assert vt.repo_type == 'hg'
439 assert "no changes found" in stdout
439 assert "no changes found" in stdout
440 assert "denied" not in stderr
440 assert "denied" not in stderr
441 assert "denied" not in stdout
441 assert "denied" not in stdout
442 assert "404" not in stdout
442 assert "404" not in stdout
443
443
444 @parametrize_vcs_test
444 @parametrize_vcs_test
445 def test_push_invalidates_cache(self, webserver, testfork, vt):
445 def test_push_invalidates_cache(self, webserver, testfork, vt):
446 pre_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])]
446 pre_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])]
447
447
448 dest_dir = _get_tmp_dir()
448 dest_dir = _get_tmp_dir()
449 clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type])
449 clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type])
450 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
450 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
451
451
452 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, files_no=1, clone_url=clone_url)
452 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, files_no=1, clone_url=clone_url)
453
453
454 if vt.repo_type == 'git':
454 if vt.repo_type == 'git':
455 _check_proper_git_push(stdout, stderr)
455 _check_proper_git_push(stdout, stderr)
456
456
457 Session.close() # expire session to make sure SA fetches new Repository instances after last_changeset has been updated by server side hook in another process
457 Session.close() # expire session to make sure SA fetches new Repository instances after last_changeset has been updated by server side hook in another process
458 post_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])]
458 post_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])]
459 assert pre_cached_tip != post_cached_tip
459 assert pre_cached_tip != post_cached_tip
460
460
461 @parametrize_vcs_test_http
461 @parametrize_vcs_test_http
462 def test_push_wrong_credentials(self, webserver, vt):
462 def test_push_wrong_credentials(self, webserver, vt):
463 dest_dir = _get_tmp_dir()
463 dest_dir = _get_tmp_dir()
464 clone_url = vt.repo_url_param(webserver, vt.repo_name)
464 clone_url = vt.repo_url_param(webserver, vt.repo_name)
465 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
465 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
466
466
467 clone_url = webserver.repo_url(vt.repo_name, username='bad', password='name')
467 clone_url = webserver.repo_url(vt.repo_name, username='bad', password='name')
468 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir,
468 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir,
469 clone_url=clone_url, ignoreReturnCode=True)
469 clone_url=clone_url, ignoreReturnCode=True)
470
470
471 if vt.repo_type == 'git':
471 if vt.repo_type == 'git':
472 assert 'fatal: Authentication failed' in stderr
472 assert 'fatal: Authentication failed' in stderr
473 elif vt.repo_type == 'hg':
473 elif vt.repo_type == 'hg':
474 assert 'abort: authorization failed' in stderr
474 assert 'abort: authorization failed' in stderr
475
475
476 @parametrize_vcs_test
476 @parametrize_vcs_test
477 def test_push_with_readonly_credentials(self, webserver, vt):
477 def test_push_with_readonly_credentials(self, webserver, vt):
478 UserLog.query().delete()
478 UserLog.query().delete()
479 Session().commit()
479 Session().commit()
480
480
481 dest_dir = _get_tmp_dir()
481 dest_dir = _get_tmp_dir()
482 clone_url = vt.repo_url_param(webserver, vt.repo_name, username=base.TEST_USER_REGULAR_LOGIN, password=base.TEST_USER_REGULAR_PASS)
482 clone_url = vt.repo_url_param(webserver, vt.repo_name, username=base.TEST_USER_REGULAR_LOGIN, password=base.TEST_USER_REGULAR_PASS)
483 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
483 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
484
484
485 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, ignoreReturnCode=True, clone_url=clone_url)
485 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, ignoreReturnCode=True, clone_url=clone_url)
486
486
487 if vt.repo_type == 'git':
487 if vt.repo_type == 'git':
488 assert 'The requested URL returned error: 403' in stderr or 'abort: Push access to %r denied' % str(vt.repo_name) in stderr
488 assert 'The requested URL returned error: 403' in stderr or 'abort: Push access to %r denied' % str(vt.repo_name) in stderr
489 elif vt.repo_type == 'hg':
489 elif vt.repo_type == 'hg':
490 assert 'abort: HTTP Error 403: Forbidden' in stderr or 'abort: push failed on remote' in stderr and 'remote: Push access to %r denied' % str(vt.repo_name) in stdout
490 assert 'abort: HTTP Error 403: Forbidden' in stderr or 'abort: push failed on remote' in stderr and 'remote: Push access to %r denied' % str(vt.repo_name) in stdout
491
491
492 Session.close() # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
492 Session.close() # make sure SA fetches all new log entries (apparently only needed for MariaDB/MySQL ...)
493 action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
493 action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
494 assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \
494 assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \
495 [('pull', 0)]
495 [('pull', 0)]
496
496
497 @parametrize_vcs_test
497 @parametrize_vcs_test
498 def test_push_back_to_wrong_url(self, webserver, vt):
498 def test_push_back_to_wrong_url(self, webserver, vt):
499 dest_dir = _get_tmp_dir()
499 dest_dir = _get_tmp_dir()
500 clone_url = vt.repo_url_param(webserver, vt.repo_name)
500 clone_url = vt.repo_url_param(webserver, vt.repo_name)
501 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
501 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
502
502
503 stdout, stderr = _add_files_and_push(
503 stdout, stderr = _add_files_and_push(
504 webserver, vt, dest_dir, clone_url='http://%s:%s/tmp' % (
504 webserver, vt, dest_dir, clone_url='http://%s:%s/tmp' % (
505 webserver.server_address[0], webserver.server_address[1]),
505 webserver.server_address[0], webserver.server_address[1]),
506 ignoreReturnCode=True)
506 ignoreReturnCode=True)
507
507
508 if vt.repo_type == 'git':
508 if vt.repo_type == 'git':
509 assert 'not found' in stderr
509 assert 'not found' in stderr
510 elif vt.repo_type == 'hg':
510 elif vt.repo_type == 'hg':
511 assert 'HTTP Error 404: Not Found' in stderr
511 assert 'HTTP Error 404: Not Found' in stderr
512
512
513 @parametrize_vcs_test
513 @parametrize_vcs_test
514 def test_ip_restriction(self, webserver, vt):
514 def test_ip_restriction(self, webserver, vt):
515 user_model = UserModel()
515 user_model = UserModel()
516 try:
516 try:
517 # Add IP constraint that excludes the test context:
517 # Add IP constraint that excludes the test context:
518 user_model.add_extra_ip(base.TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
518 user_model.add_extra_ip(base.TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
519 Session().commit()
519 Session().commit()
520 # IP permissions are cached, need to wait for the cache in the server process to expire
520 # IP permissions are cached, need to wait for the cache in the server process to expire
521 time.sleep(1.5)
521 time.sleep(1.5)
522 clone_url = vt.repo_url_param(webserver, vt.repo_name)
522 clone_url = vt.repo_url_param(webserver, vt.repo_name)
523 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
523 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
524 if vt.repo_type == 'git':
524 if vt.repo_type == 'git':
525 # The message apparently changed in Git 1.8.3, so match it loosely.
525 # The message apparently changed in Git 1.8.3, so match it loosely.
526 assert re.search(r'\b403\b', stderr) or 'abort: User test_admin from 127.0.0.127 cannot be authorized' in stderr
526 assert re.search(r'\b403\b', stderr) or 'abort: User test_admin from 127.0.0.127 cannot be authorized' in stderr
527 elif vt.repo_type == 'hg':
527 elif vt.repo_type == 'hg':
528 assert 'abort: HTTP Error 403: Forbidden' in stderr or 'remote: abort: User test_admin from 127.0.0.127 cannot be authorized' in stdout + stderr
528 assert 'abort: HTTP Error 403: Forbidden' in stderr or 'remote: abort: User test_admin from 127.0.0.127 cannot be authorized' in stdout + stderr
529 finally:
529 finally:
530 # release IP restrictions
530 # release IP restrictions
531 for ip in UserIpMap.query():
531 for ip in UserIpMap.query():
532 UserIpMap.delete(ip.ip_id)
532 UserIpMap.delete(ip.ip_id)
533 Session().commit()
533 Session().commit()
534 # IP permissions are cached, need to wait for the cache in the server process to expire
534 # IP permissions are cached, need to wait for the cache in the server process to expire
535 time.sleep(1.5)
535 time.sleep(1.5)
536
536
537 clone_url = vt.repo_url_param(webserver, vt.repo_name)
537 clone_url = vt.repo_url_param(webserver, vt.repo_name)
538 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir())
538 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir())
539
539
540 if vt.repo_type == 'git':
540 if vt.repo_type == 'git':
541 assert 'Cloning into' in stdout + stderr
541 assert 'Cloning into' in stdout + stderr
542 assert stderr == '' or stdout == ''
542 assert stderr == '' or stdout == ''
543 elif vt.repo_type == 'hg':
543 elif vt.repo_type == 'hg':
544 assert 'requesting all changes' in stdout
544 assert 'requesting all changes' in stdout
545 assert 'adding changesets' in stdout
545 assert 'adding changesets' in stdout
546 assert 'adding manifests' in stdout
546 assert 'adding manifests' in stdout
547 assert 'adding file changes' in stdout
547 assert 'adding file changes' in stdout
548
548
549 assert stderr == ''
549 assert stderr == ''
550
550
@parametrize_vcs_test_hg  # git hooks don't work like hg hooks
552 def test_custom_hooks_preoutgoing(self, testhook_cleanup, webserver, testfork, vt):
552 def test_custom_hooks_preoutgoing(self, testhook_cleanup, webserver, testfork, vt):
# set preoutgoing to the failing test hook
554 Ui.create_or_update_hook('preoutgoing.testhook', 'python:kallithea.tests.fixture.failing_test_hook')
554 Ui.create_or_update_hook('preoutgoing.testhook', 'python:kallithea.tests.fixture.failing_test_hook')
555 Session().commit()
555 Session().commit()
556 # clone repo
556 # clone repo
557 clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS)
557 clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS)
558 dest_dir = _get_tmp_dir()
558 dest_dir = _get_tmp_dir()
559 stdout, stderr = Command(base.TESTS_TMP_PATH) \
559 stdout, stderr = Command(base.TESTS_TMP_PATH) \
560 .execute(vt.repo_type, 'clone', clone_url, dest_dir, ignoreReturnCode=True)
560 .execute(vt.repo_type, 'clone', clone_url, dest_dir, ignoreReturnCode=True)
561 if vt.repo_type == 'hg':
561 if vt.repo_type == 'hg':
562 assert 'preoutgoing.testhook hook failed' in stdout
562 assert 'preoutgoing.testhook hook failed' in stdout
563 elif vt.repo_type == 'git':
563 elif vt.repo_type == 'git':
564 assert 'error: 406' in stderr
564 assert 'error: 406' in stderr
565
565
@parametrize_vcs_test_hg  # git hooks don't work like hg hooks
567 def test_custom_hooks_prechangegroup(self, testhook_cleanup, webserver, testfork, vt):
567 def test_custom_hooks_prechangegroup(self, testhook_cleanup, webserver, testfork, vt):
568 # set prechangegroup to failing hook (returns exit code 1)
568 # set prechangegroup to failing hook (returns exit code 1)
569 Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.failing_test_hook')
569 Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.failing_test_hook')
570 Session().commit()
570 Session().commit()
571 # clone repo
571 # clone repo
572 clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS)
572 clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS)
573 dest_dir = _get_tmp_dir()
573 dest_dir = _get_tmp_dir()
574 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
574 stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
575
575
576 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, clone_url,
576 stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, clone_url,
577 ignoreReturnCode=True)
577 ignoreReturnCode=True)
578 assert 'failing_test_hook failed' in stdout + stderr
578 assert 'failing_test_hook failed' in stdout + stderr
579 assert 'Traceback' not in stdout + stderr
579 assert 'Traceback' not in stdout + stderr
580 assert 'prechangegroup.testhook hook failed' in stdout + stderr
580 assert 'prechangegroup.testhook hook failed' in stdout + stderr
581 # there are still outgoing changesets
581 # there are still outgoing changesets
582 stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url)
582 stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url)
583 assert stdout != ''
583 assert stdout != ''
584
584
585 # set prechangegroup hook to exception throwing method
585 # set prechangegroup hook to exception throwing method
586 Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.exception_test_hook')
586 Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.exception_test_hook')
587 Session().commit()
587 Session().commit()
588 # re-try to push
588 # re-try to push
589 stdout, stderr = Command(dest_dir).execute('%s push' % vt.repo_type, clone_url, ignoreReturnCode=True)
589 stdout, stderr = Command(dest_dir).execute('%s push' % vt.repo_type, clone_url, ignoreReturnCode=True)
590 if vt is HgHttpVcsTest:
590 if vt is HgHttpVcsTest:
591 # like with 'hg serve...' 'HTTP Error 500: INTERNAL SERVER ERROR' should be returned
591 # like with 'hg serve...' 'HTTP Error 500: INTERNAL SERVER ERROR' should be returned
592 assert 'HTTP Error 500: INTERNAL SERVER ERROR' in stderr
592 assert 'HTTP Error 500: INTERNAL SERVER ERROR' in stderr
593 elif vt is HgSshVcsTest:
593 elif vt is HgSshVcsTest:
594 assert 'remote: Exception: exception_test_hook threw an exception' in stdout
594 assert 'remote: Exception: exception_test_hook threw an exception' in stdout
595 else: assert False
595 else: assert False
596 # there are still outgoing changesets
596 # there are still outgoing changesets
597 stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url)
597 stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url)
598 assert stdout != ''
598 assert stdout != ''
599
599
600 # set prechangegroup hook to method that returns False
600 # set prechangegroup hook to method that returns False
601 Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.passing_test_hook')
601 Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.passing_test_hook')
602 Session().commit()
602 Session().commit()
603 # re-try to push
603 # re-try to push
604 stdout, stderr = Command(dest_dir).execute('%s push' % vt.repo_type, clone_url, ignoreReturnCode=True)
604 stdout, stderr = Command(dest_dir).execute('%s push' % vt.repo_type, clone_url, ignoreReturnCode=True)
605 assert 'passing_test_hook succeeded' in stdout + stderr
605 assert 'passing_test_hook succeeded' in stdout + stderr
606 assert 'Traceback' not in stdout + stderr
606 assert 'Traceback' not in stdout + stderr
607 assert 'prechangegroup.testhook hook failed' not in stdout + stderr
607 assert 'prechangegroup.testhook hook failed' not in stdout + stderr
608 # no more outgoing changesets
608 # no more outgoing changesets
609 stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url)
609 stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url)
610 assert stdout == ''
610 assert stdout == ''
611 assert stderr == ''
611 assert stderr == ''
612
612
613 def test_add_submodule_git(self, webserver, testfork):
613 def test_add_submodule_git(self, webserver, testfork):
614 dest_dir = _get_tmp_dir()
614 dest_dir = _get_tmp_dir()
615 clone_url = GitHttpVcsTest.repo_url_param(webserver, base.GIT_REPO)
615 clone_url = GitHttpVcsTest.repo_url_param(webserver, base.GIT_REPO)
616
616
617 fork_url = GitHttpVcsTest.repo_url_param(webserver, testfork['git'])
617 fork_url = GitHttpVcsTest.repo_url_param(webserver, testfork['git'])
618
618
619 # add submodule
619 # add submodule
620 stdout, stderr = Command(base.TESTS_TMP_PATH).execute('git clone', fork_url, dest_dir)
620 stdout, stderr = Command(base.TESTS_TMP_PATH).execute('git clone', fork_url, dest_dir)
621 stdout, stderr = Command(dest_dir).execute('git submodule add', clone_url, 'testsubmodule')
621 stdout, stderr = Command(dest_dir).execute('git submodule add', clone_url, 'testsubmodule')
622 stdout, stderr = Command(dest_dir).execute('git commit -am "added testsubmodule pointing to', clone_url, '"', EMAIL=base.TEST_USER_ADMIN_EMAIL)
622 stdout, stderr = Command(dest_dir).execute('git commit -am "added testsubmodule pointing to', clone_url, '"', EMAIL=base.TEST_USER_ADMIN_EMAIL)
623 stdout, stderr = Command(dest_dir).execute('git push', fork_url, 'master')
623 stdout, stderr = Command(dest_dir).execute('git push', fork_url, 'master')
624
624
625 # check for testsubmodule link in files page
625 # check for testsubmodule link in files page
626 self.log_user()
626 self.log_user()
627 response = self.app.get(base.url(controller='files', action='index',
627 response = self.app.get(base.url(controller='files', action='index',
628 repo_name=testfork['git'],
628 repo_name=testfork['git'],
629 revision='tip',
629 revision='tip',
630 f_path='/'))
630 f_path='/'))
631 # check _repo_files_url that will be used to reload as AJAX
631 # check _repo_files_url that will be used to reload as AJAX
632 response.mustcontain('var _repo_files_url = ("/%s/files/");' % testfork['git'])
632 response.mustcontain('var _repo_files_url = ("/%s/files/");' % testfork['git'])
633
633
634 response.mustcontain('<a class="submodule-dir" href="%s" target="_blank"><i class="icon-file-submodule"></i><span>testsubmodule @ ' % clone_url)
634 response.mustcontain('<a class="submodule-dir" href="%s" target="_blank"><i class="icon-file-submodule"></i><span>testsubmodule @ ' % clone_url)
635
635
636 # check that following a submodule link actually works - and redirects
636 # check that following a submodule link actually works - and redirects
637 response = self.app.get(base.url(controller='files', action='index',
637 response = self.app.get(base.url(controller='files', action='index',
638 repo_name=testfork['git'],
638 repo_name=testfork['git'],
639 revision='tip',
639 revision='tip',
640 f_path='/testsubmodule'),
640 f_path='/testsubmodule'),
641 status=302)
641 status=302)
642 assert response.location == clone_url
642 assert response.location == clone_url