caches: use .refresh() instead of .invalidate()...
marcink
r2939:913f92bf default

The requested changes are too big and the content was truncated; the diff below is incomplete.
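Before the diff itself, a schematic sketch (not RhodeCode code) of the pattern this commit changes: a dict-backed stand-in for the decorator helpers defined in `rc_cache` further down (`.invalidate()` / `.refresh()`), showing why the call sites switch from "invalidate, then call again" to a single `.refresh()` that re-computes and stores in one step.

```python
# Schematic stand-in for the rc_cache decorator helpers shown later in this diff.
def cached(fn):
    memo = {}

    def wrapper(*args):
        if args not in memo:              # get-or-create behaviour
            memo[args] = fn(*args)
        return memo[args]

    def invalidate(*args):                # old pattern: drop the key ...
        memo.pop(args, None)              # ... the caller must then call again

    def refresh(*args):                   # new pattern: re-compute and store
        memo[args] = fn(*args)
        return memo[args]

    wrapper.invalidate = invalidate
    wrapper.refresh = refresh
    return wrapper


@cached
def heavy_compute(repo_id, feed_type):
    return 'feed for %s (%s)' % (repo_id, feed_type)

# old call sites:  heavy_compute.invalidate(1, 'atom'); value = heavy_compute(1, 'atom')
# new call sites:  value = heavy_compute.refresh(1, 'atom')
```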

@@ -1,242 +1,238 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2017-2018 RhodeCode GmbH
3 # Copyright (C) 2017-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import pytz
20 import pytz
21 import logging
21 import logging
22
22
23 from pyramid.view import view_config
23 from pyramid.view import view_config
24 from pyramid.response import Response
24 from pyramid.response import Response
25 from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed
25 from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed
26
26
27 from rhodecode.apps._base import RepoAppView
27 from rhodecode.apps._base import RepoAppView
28 from rhodecode.lib import audit_logger
28 from rhodecode.lib import audit_logger
29 from rhodecode.lib import rc_cache
29 from rhodecode.lib import rc_cache
30 from rhodecode.lib import helpers as h
30 from rhodecode.lib import helpers as h
31 from rhodecode.lib.auth import (
31 from rhodecode.lib.auth import (
32 LoginRequired, HasRepoPermissionAnyDecorator)
32 LoginRequired, HasRepoPermissionAnyDecorator)
33 from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer
33 from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer
34 from rhodecode.lib.utils2 import str2bool, safe_int, md5_safe
34 from rhodecode.lib.utils2 import str2bool, safe_int, md5_safe
35 from rhodecode.model.db import UserApiKeys, CacheKey
35 from rhodecode.model.db import UserApiKeys, CacheKey
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class RepoFeedView(RepoAppView):
40 class RepoFeedView(RepoAppView):
41 def load_default_context(self):
41 def load_default_context(self):
42 c = self._get_local_tmpl_context()
42 c = self._get_local_tmpl_context()
43
43
44
44
45 self._load_defaults()
45 self._load_defaults()
46 return c
46 return c
47
47
48 def _get_config(self):
48 def _get_config(self):
49 import rhodecode
49 import rhodecode
50 config = rhodecode.CONFIG
50 config = rhodecode.CONFIG
51
51
52 return {
52 return {
53 'language': 'en-us',
53 'language': 'en-us',
54 'feed_ttl': '5', # TTL of feed,
54 'feed_ttl': '5', # TTL of feed,
55 'feed_include_diff':
55 'feed_include_diff':
56 str2bool(config.get('rss_include_diff', False)),
56 str2bool(config.get('rss_include_diff', False)),
57 'feed_items_per_page':
57 'feed_items_per_page':
58 safe_int(config.get('rss_items_per_page', 20)),
58 safe_int(config.get('rss_items_per_page', 20)),
59 'feed_diff_limit':
59 'feed_diff_limit':
60 # we need to protect from parsing huge diffs here other way
60 # we need to protect from parsing huge diffs here other way
61 # we can kill the server
61 # we can kill the server
62 safe_int(config.get('rss_cut_off_limit', 32 * 1024)),
62 safe_int(config.get('rss_cut_off_limit', 32 * 1024)),
63 }
63 }
64
64
65 def _load_defaults(self):
65 def _load_defaults(self):
66 _ = self.request.translate
66 _ = self.request.translate
67 config = self._get_config()
67 config = self._get_config()
68 # common values for feeds
68 # common values for feeds
69 self.description = _('Changes on %s repository')
69 self.description = _('Changes on %s repository')
70 self.title = self.title = _('%s %s feed') % (self.db_repo_name, '%s')
70 self.title = self.title = _('%s %s feed') % (self.db_repo_name, '%s')
71 self.language = config["language"]
71 self.language = config["language"]
72 self.ttl = config["feed_ttl"]
72 self.ttl = config["feed_ttl"]
73 self.feed_include_diff = config['feed_include_diff']
73 self.feed_include_diff = config['feed_include_diff']
74 self.feed_diff_limit = config['feed_diff_limit']
74 self.feed_diff_limit = config['feed_diff_limit']
75 self.feed_items_per_page = config['feed_items_per_page']
75 self.feed_items_per_page = config['feed_items_per_page']
76
76
77 def _changes(self, commit):
77 def _changes(self, commit):
78 diff_processor = DiffProcessor(
78 diff_processor = DiffProcessor(
79 commit.diff(), diff_limit=self.feed_diff_limit)
79 commit.diff(), diff_limit=self.feed_diff_limit)
80 _parsed = diff_processor.prepare(inline_diff=False)
80 _parsed = diff_processor.prepare(inline_diff=False)
81 limited_diff = isinstance(_parsed, LimitedDiffContainer)
81 limited_diff = isinstance(_parsed, LimitedDiffContainer)
82
82
83 return diff_processor, _parsed, limited_diff
83 return diff_processor, _parsed, limited_diff
84
84
85 def _get_title(self, commit):
85 def _get_title(self, commit):
86 return h.shorter(commit.message, 160)
86 return h.shorter(commit.message, 160)
87
87
88 def _get_description(self, commit):
88 def _get_description(self, commit):
89 _renderer = self.request.get_partial_renderer(
89 _renderer = self.request.get_partial_renderer(
90 'rhodecode:templates/feed/atom_feed_entry.mako')
90 'rhodecode:templates/feed/atom_feed_entry.mako')
91 diff_processor, parsed_diff, limited_diff = self._changes(commit)
91 diff_processor, parsed_diff, limited_diff = self._changes(commit)
92 filtered_parsed_diff, has_hidden_changes = self.path_filter.filter_patchset(parsed_diff)
92 filtered_parsed_diff, has_hidden_changes = self.path_filter.filter_patchset(parsed_diff)
93 return _renderer(
93 return _renderer(
94 'body',
94 'body',
95 commit=commit,
95 commit=commit,
96 parsed_diff=filtered_parsed_diff,
96 parsed_diff=filtered_parsed_diff,
97 limited_diff=limited_diff,
97 limited_diff=limited_diff,
98 feed_include_diff=self.feed_include_diff,
98 feed_include_diff=self.feed_include_diff,
99 diff_processor=diff_processor,
99 diff_processor=diff_processor,
100 has_hidden_changes=has_hidden_changes
100 has_hidden_changes=has_hidden_changes
101 )
101 )
102
102
103 def _set_timezone(self, date, tzinfo=pytz.utc):
103 def _set_timezone(self, date, tzinfo=pytz.utc):
104 if not getattr(date, "tzinfo", None):
104 if not getattr(date, "tzinfo", None):
105 date.replace(tzinfo=tzinfo)
105 date.replace(tzinfo=tzinfo)
106 return date
106 return date
107
107
108 def _get_commits(self):
108 def _get_commits(self):
109 return list(self.rhodecode_vcs_repo[-self.feed_items_per_page:])
109 return list(self.rhodecode_vcs_repo[-self.feed_items_per_page:])
110
110
111 def uid(self, repo_id, commit_id):
111 def uid(self, repo_id, commit_id):
112 return '{}:{}'.format(md5_safe(repo_id), md5_safe(commit_id))
112 return '{}:{}'.format(md5_safe(repo_id), md5_safe(commit_id))
113
113
114 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
114 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
115 @HasRepoPermissionAnyDecorator(
115 @HasRepoPermissionAnyDecorator(
116 'repository.read', 'repository.write', 'repository.admin')
116 'repository.read', 'repository.write', 'repository.admin')
117 @view_config(
117 @view_config(
118 route_name='atom_feed_home', request_method='GET',
118 route_name='atom_feed_home', request_method='GET',
119 renderer=None)
119 renderer=None)
120 def atom(self):
120 def atom(self):
121 """
121 """
122 Produce an atom-1.0 feed via feedgenerator module
122 Produce an atom-1.0 feed via feedgenerator module
123 """
123 """
124 self.load_default_context()
124 self.load_default_context()
125
125
126 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
126 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
127 self.db_repo.repo_id, CacheKey.CACHE_TYPE_FEED)
127 self.db_repo.repo_id, CacheKey.CACHE_TYPE_FEED)
128 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
128 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
129 repo_id=self.db_repo.repo_id)
129 repo_id=self.db_repo.repo_id)
130
130
131 region = rc_cache.get_or_create_region('cache_repo_longterm',
131 region = rc_cache.get_or_create_region('cache_repo_longterm',
132 cache_namespace_uid)
132 cache_namespace_uid)
133
133
134 condition = not self.path_filter.is_enabled
134 condition = not self.path_filter.is_enabled
135
135
136 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
136 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
137 condition=condition)
137 condition=condition)
138 def generate_atom_feed(repo_id, _repo_name, _feed_type):
138 def generate_atom_feed(repo_id, _repo_name, _feed_type):
139 feed = Atom1Feed(
139 feed = Atom1Feed(
140 title=self.title % _repo_name,
140 title=self.title % _repo_name,
141 link=h.route_url('repo_summary', repo_name=_repo_name),
141 link=h.route_url('repo_summary', repo_name=_repo_name),
142 description=self.description % _repo_name,
142 description=self.description % _repo_name,
143 language=self.language,
143 language=self.language,
144 ttl=self.ttl
144 ttl=self.ttl
145 )
145 )
146
146
147 for commit in reversed(self._get_commits()):
147 for commit in reversed(self._get_commits()):
148 date = self._set_timezone(commit.date)
148 date = self._set_timezone(commit.date)
149 feed.add_item(
149 feed.add_item(
150 unique_id=self.uid(repo_id, commit.raw_id),
150 unique_id=self.uid(repo_id, commit.raw_id),
151 title=self._get_title(commit),
151 title=self._get_title(commit),
152 author_name=commit.author,
152 author_name=commit.author,
153 description=self._get_description(commit),
153 description=self._get_description(commit),
154 link=h.route_url(
154 link=h.route_url(
155 'repo_commit', repo_name=_repo_name,
155 'repo_commit', repo_name=_repo_name,
156 commit_id=commit.raw_id),
156 commit_id=commit.raw_id),
157 pubdate=date,)
157 pubdate=date,)
158
158
159 return feed.mime_type, feed.writeString('utf-8')
159 return feed.mime_type, feed.writeString('utf-8')
160
160
161 inv_context_manager = rc_cache.InvalidationContext(
161 inv_context_manager = rc_cache.InvalidationContext(
162 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
162 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
163 with inv_context_manager as invalidation_context:
163 with inv_context_manager as invalidation_context:
164     -     # check for stored invalidation signal, and maybe purge the cache
165     -     # before computing it again
    164 +     args = (self.db_repo.repo_id, self.db_repo.repo_name, 'atom',)
    165 +     # re-compute and store cache if we get invalidate signal
166 166       if invalidation_context.should_invalidate():
167     -         generate_atom_feed.invalidate(
168     -             self.db_repo.repo_id, self.db_repo.repo_name, 'atom')
169     -
170     -     mime_type, feed = generate_atom_feed(
171     -         self.db_repo.repo_id, self.db_repo.repo_name, 'atom')
    167 +         mime_type, feed = generate_atom_feed.refresh(*args)
    168 +     else:
    169 +         mime_type, feed = generate_atom_feed(*args)
172 170
173 log.debug('Repo ATOM feed computed in %.3fs',
171 log.debug('Repo ATOM feed computed in %.3fs',
174 inv_context_manager.compute_time)
172 inv_context_manager.compute_time)
175
173
176 response = Response(feed)
174 response = Response(feed)
177 response.content_type = mime_type
175 response.content_type = mime_type
178 return response
176 return response
179
177
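A minimal standalone sketch of the `webhelpers.feedgenerator` Atom API exactly as `atom()` uses it above; the repository name, URLs, author and commit data here are made up for illustration.

```python
import datetime
import pytz
from webhelpers.feedgenerator import Atom1Feed

feed = Atom1Feed(
    title='example-repo atom feed',
    link='https://code.example.com/example-repo',
    description='Changes on example-repo repository',
    language='en-us',
    ttl='5',
)
feed.add_item(
    unique_id='deadbeef',
    title='fix: handle empty commits',
    author_name='example-user',
    description='<p>rendered commit description / diff goes here</p>',
    link='https://code.example.com/example-repo/changeset/deadbeef',
    pubdate=datetime.datetime(2018, 1, 1, tzinfo=pytz.utc),
)

body = feed.writeString('utf-8')   # the serialized XML that gets cached above
content_type = feed.mime_type      # atom content type used for the Response
```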
180 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
178 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
181 @HasRepoPermissionAnyDecorator(
179 @HasRepoPermissionAnyDecorator(
182 'repository.read', 'repository.write', 'repository.admin')
180 'repository.read', 'repository.write', 'repository.admin')
183 @view_config(
181 @view_config(
184 route_name='rss_feed_home', request_method='GET',
182 route_name='rss_feed_home', request_method='GET',
185 renderer=None)
183 renderer=None)
186 def rss(self):
184 def rss(self):
187 """
185 """
188 Produce an rss2 feed via feedgenerator module
186 Produce an rss2 feed via feedgenerator module
189 """
187 """
190 self.load_default_context()
188 self.load_default_context()
191
189
192 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
190 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
193 self.db_repo.repo_id, CacheKey.CACHE_TYPE_FEED)
191 self.db_repo.repo_id, CacheKey.CACHE_TYPE_FEED)
194 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
192 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
195 repo_id=self.db_repo.repo_id)
193 repo_id=self.db_repo.repo_id)
196 region = rc_cache.get_or_create_region('cache_repo_longterm',
194 region = rc_cache.get_or_create_region('cache_repo_longterm',
197 cache_namespace_uid)
195 cache_namespace_uid)
198
196
199 condition = not self.path_filter.is_enabled
197 condition = not self.path_filter.is_enabled
200
198
201 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
199 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
202 condition=condition)
200 condition=condition)
203 def generate_rss_feed(repo_id, _repo_name, _feed_type):
201 def generate_rss_feed(repo_id, _repo_name, _feed_type):
204 feed = Rss201rev2Feed(
202 feed = Rss201rev2Feed(
205 title=self.title % _repo_name,
203 title=self.title % _repo_name,
206 link=h.route_url('repo_summary', repo_name=_repo_name),
204 link=h.route_url('repo_summary', repo_name=_repo_name),
207 description=self.description % _repo_name,
205 description=self.description % _repo_name,
208 language=self.language,
206 language=self.language,
209 ttl=self.ttl
207 ttl=self.ttl
210 )
208 )
211
209
212 for commit in reversed(self._get_commits()):
210 for commit in reversed(self._get_commits()):
213 date = self._set_timezone(commit.date)
211 date = self._set_timezone(commit.date)
214 feed.add_item(
212 feed.add_item(
215 unique_id=self.uid(repo_id, commit.raw_id),
213 unique_id=self.uid(repo_id, commit.raw_id),
216 title=self._get_title(commit),
214 title=self._get_title(commit),
217 author_name=commit.author,
215 author_name=commit.author,
218 description=self._get_description(commit),
216 description=self._get_description(commit),
219 link=h.route_url(
217 link=h.route_url(
220 'repo_commit', repo_name=_repo_name,
218 'repo_commit', repo_name=_repo_name,
221 commit_id=commit.raw_id),
219 commit_id=commit.raw_id),
222 pubdate=date,)
220 pubdate=date,)
223
221
224 return feed.mime_type, feed.writeString('utf-8')
222 return feed.mime_type, feed.writeString('utf-8')
225
223
226 inv_context_manager = rc_cache.InvalidationContext(
224 inv_context_manager = rc_cache.InvalidationContext(
227 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
225 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
228 with inv_context_manager as invalidation_context:
226 with inv_context_manager as invalidation_context:
229     -     # check for stored invalidation signal, and maybe purge the cache
230     -     # before computing it again
    227 +     args = (self.db_repo.repo_id, self.db_repo.repo_name, 'rss',)
    228 +     # re-compute and store cache if we get invalidate signal
231 229       if invalidation_context.should_invalidate():
232     -         generate_rss_feed.invalidate(
233     -             self.db_repo.repo_id, self.db_repo.repo_name, 'rss')
234     -
235     -     mime_type, feed = generate_rss_feed(
236     -         self.db_repo.repo_id, self.db_repo.repo_name, 'rss')
    230 +         mime_type, feed = generate_rss_feed.refresh(*args)
    231 +     else:
    232 +         mime_type, feed = generate_rss_feed(*args)
237 log.debug(
233 log.debug(
238 'Repo RSS feed computed in %.3fs', inv_context_manager.compute_time)
234 'Repo RSS feed computed in %.3fs', inv_context_manager.compute_time)
239
235
240 response = Response(feed)
236 response = Response(feed)
241 response.content_type = mime_type
237 response.content_type = mime_type
242 return response
238 return response
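The cached feeds above are only re-computed when `should_invalidate()` reports a pending signal. A hedged sketch of how such a signal is raised, based on the `InvalidationContext` docstring further down in this commit; `mark_repo_caches_stale` is a made-up helper name, while the two calls it wraps appear verbatim in this diff.

```python
from rhodecode.model.db import CacheKey


def mark_repo_caches_stale(repo_id):
    # shared invalidation namespace used by the feed/readme caches above
    invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
        repo_id=repo_id)
    # sends the global "please re-compute" signal that
    # invalidation_context.should_invalidate() later picks up
    CacheKey.set_invalidate(invalidation_namespace)
```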
@@ -1,392 +1,390 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2018 RhodeCode GmbH
3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import string
22 import string
23 import rhodecode
23 import rhodecode
24
24
25 from pyramid.view import view_config
25 from pyramid.view import view_config
26
26
27 from rhodecode.controllers import utils
27 from rhodecode.controllers import utils
28 from rhodecode.apps._base import RepoAppView
28 from rhodecode.apps._base import RepoAppView
29 from rhodecode.config.conf import (LANGUAGES_EXTENSIONS_MAP)
29 from rhodecode.config.conf import (LANGUAGES_EXTENSIONS_MAP)
30 from rhodecode.lib import helpers as h, rc_cache
30 from rhodecode.lib import helpers as h, rc_cache
31 from rhodecode.lib.utils2 import safe_str, safe_int
31 from rhodecode.lib.utils2 import safe_str, safe_int
32 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
32 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
33 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
33 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
34 from rhodecode.lib.ext_json import json
34 from rhodecode.lib.ext_json import json
35 from rhodecode.lib.vcs.backends.base import EmptyCommit
35 from rhodecode.lib.vcs.backends.base import EmptyCommit
36 from rhodecode.lib.vcs.exceptions import (
36 from rhodecode.lib.vcs.exceptions import (
37 CommitError, EmptyRepositoryError, CommitDoesNotExistError)
37 CommitError, EmptyRepositoryError, CommitDoesNotExistError)
38 from rhodecode.model.db import Statistics, CacheKey, User
38 from rhodecode.model.db import Statistics, CacheKey, User
39 from rhodecode.model.meta import Session
39 from rhodecode.model.meta import Session
40 from rhodecode.model.repo import ReadmeFinder
40 from rhodecode.model.repo import ReadmeFinder
41 from rhodecode.model.scm import ScmModel
41 from rhodecode.model.scm import ScmModel
42
42
43 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
44
44
45
45
46 class RepoSummaryView(RepoAppView):
46 class RepoSummaryView(RepoAppView):
47
47
48 def load_default_context(self):
48 def load_default_context(self):
49 c = self._get_local_tmpl_context(include_app_defaults=True)
49 c = self._get_local_tmpl_context(include_app_defaults=True)
50 c.rhodecode_repo = None
50 c.rhodecode_repo = None
51 if not c.repository_requirements_missing:
51 if not c.repository_requirements_missing:
52 c.rhodecode_repo = self.rhodecode_vcs_repo
52 c.rhodecode_repo = self.rhodecode_vcs_repo
53 return c
53 return c
54
54
55 def _get_readme_data(self, db_repo, renderer_type):
55 def _get_readme_data(self, db_repo, renderer_type):
56
56
57 log.debug('Looking for README file')
57 log.debug('Looking for README file')
58
58
59 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
59 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
60 db_repo.repo_id, CacheKey.CACHE_TYPE_README)
60 db_repo.repo_id, CacheKey.CACHE_TYPE_README)
61 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
61 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
62 repo_id=self.db_repo.repo_id)
62 repo_id=self.db_repo.repo_id)
63 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
63 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
64
64
65 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
65 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
66 def generate_repo_readme(repo_id, _repo_name, _renderer_type):
66 def generate_repo_readme(repo_id, _repo_name, _renderer_type):
67 readme_data = None
67 readme_data = None
68 readme_node = None
68 readme_node = None
69 readme_filename = None
69 readme_filename = None
70 commit = self._get_landing_commit_or_none(db_repo)
70 commit = self._get_landing_commit_or_none(db_repo)
71 if commit:
71 if commit:
72 log.debug("Searching for a README file.")
72 log.debug("Searching for a README file.")
73 readme_node = ReadmeFinder(_renderer_type).search(commit)
73 readme_node = ReadmeFinder(_renderer_type).search(commit)
74 if readme_node:
74 if readme_node:
75 relative_urls = {
75 relative_urls = {
76 'raw': h.route_path(
76 'raw': h.route_path(
77 'repo_file_raw', repo_name=_repo_name,
77 'repo_file_raw', repo_name=_repo_name,
78 commit_id=commit.raw_id, f_path=readme_node.path),
78 commit_id=commit.raw_id, f_path=readme_node.path),
79 'standard': h.route_path(
79 'standard': h.route_path(
80 'repo_files', repo_name=_repo_name,
80 'repo_files', repo_name=_repo_name,
81 commit_id=commit.raw_id, f_path=readme_node.path),
81 commit_id=commit.raw_id, f_path=readme_node.path),
82 }
82 }
83 readme_data = self._render_readme_or_none(
83 readme_data = self._render_readme_or_none(
84 commit, readme_node, relative_urls)
84 commit, readme_node, relative_urls)
85 readme_filename = readme_node.path
85 readme_filename = readme_node.path
86 return readme_data, readme_filename
86 return readme_data, readme_filename
87
87
88 inv_context_manager = rc_cache.InvalidationContext(
88 inv_context_manager = rc_cache.InvalidationContext(
89 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
89 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
90 with inv_context_manager as invalidation_context:
90 with inv_context_manager as invalidation_context:
 91     -     # check for stored invalidation signal, and maybe purge the cache
 92     -     # before computing it again
     91 +     args = (db_repo.repo_id, db_repo.repo_name, renderer_type,)
     92 +     # re-compute and store cache if we get invalidate signal
 93  93       if invalidation_context.should_invalidate():
 94     -         generate_repo_readme.invalidate(
 95     -             db_repo.repo_id, db_repo.repo_name, renderer_type)
 96     -
 97     -     instance = generate_repo_readme(
 98     -         db_repo.repo_id, db_repo.repo_name, renderer_type)
     94 +         instance = generate_repo_readme.refresh(*args)
     95 +     else:
     96 +         instance = generate_repo_readme(*args)
 99  97
100 log.debug(
98 log.debug(
101 'Repo readme generated and computed in %.3fs',
99 'Repo readme generated and computed in %.3fs',
102 inv_context_manager.compute_time)
100 inv_context_manager.compute_time)
103 return instance
101 return instance
104
102
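A hedged sketch of the readme lookup/render flow used above, with the caching wrapper stripped away; `commit` is assumed to be a vcs commit object, the default renderer type and the relative URLs are made up.

```python
from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
from rhodecode.model.repo import ReadmeFinder


def render_readme(commit, renderer_type='rst'):
    # find a readme node for the given renderer type, as generate_repo_readme does
    readme_node = ReadmeFinder(renderer_type).search(commit)
    if not readme_node:
        return None, None
    # render markup and rewrite relative links, as _render_readme_or_none does
    html = MarkupRenderer().render(
        readme_node.content, filename=readme_node.path)
    urls = {'raw': '/example-repo/raw/README.rst',
            'standard': '/example-repo/files/README.rst'}
    return relative_links(html, urls), readme_node.path
```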
105 def _get_landing_commit_or_none(self, db_repo):
103 def _get_landing_commit_or_none(self, db_repo):
106 log.debug("Getting the landing commit.")
104 log.debug("Getting the landing commit.")
107 try:
105 try:
108 commit = db_repo.get_landing_commit()
106 commit = db_repo.get_landing_commit()
109 if not isinstance(commit, EmptyCommit):
107 if not isinstance(commit, EmptyCommit):
110 return commit
108 return commit
111 else:
109 else:
112 log.debug("Repository is empty, no README to render.")
110 log.debug("Repository is empty, no README to render.")
113 except CommitError:
111 except CommitError:
114 log.exception(
112 log.exception(
115 "Problem getting commit when trying to render the README.")
113 "Problem getting commit when trying to render the README.")
116
114
117 def _render_readme_or_none(self, commit, readme_node, relative_urls):
115 def _render_readme_or_none(self, commit, readme_node, relative_urls):
118 log.debug(
116 log.debug(
119 'Found README file `%s` rendering...', readme_node.path)
117 'Found README file `%s` rendering...', readme_node.path)
120 renderer = MarkupRenderer()
118 renderer = MarkupRenderer()
121 try:
119 try:
122 html_source = renderer.render(
120 html_source = renderer.render(
123 readme_node.content, filename=readme_node.path)
121 readme_node.content, filename=readme_node.path)
124 if relative_urls:
122 if relative_urls:
125 return relative_links(html_source, relative_urls)
123 return relative_links(html_source, relative_urls)
126 return html_source
124 return html_source
127 except Exception:
125 except Exception:
128 log.exception(
126 log.exception(
129 "Exception while trying to render the README")
127 "Exception while trying to render the README")
130
128
131 def _load_commits_context(self, c):
129 def _load_commits_context(self, c):
132 p = safe_int(self.request.GET.get('page'), 1)
130 p = safe_int(self.request.GET.get('page'), 1)
133 size = safe_int(self.request.GET.get('size'), 10)
131 size = safe_int(self.request.GET.get('size'), 10)
134
132
135 def url_generator(**kw):
133 def url_generator(**kw):
136 query_params = {
134 query_params = {
137 'size': size
135 'size': size
138 }
136 }
139 query_params.update(kw)
137 query_params.update(kw)
140 return h.route_path(
138 return h.route_path(
141 'repo_summary_commits',
139 'repo_summary_commits',
142 repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)
140 repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)
143
141
144 pre_load = ['author', 'branch', 'date', 'message']
142 pre_load = ['author', 'branch', 'date', 'message']
145 try:
143 try:
146 collection = self.rhodecode_vcs_repo.get_commits(pre_load=pre_load)
144 collection = self.rhodecode_vcs_repo.get_commits(pre_load=pre_load)
147 except EmptyRepositoryError:
145 except EmptyRepositoryError:
148 collection = self.rhodecode_vcs_repo
146 collection = self.rhodecode_vcs_repo
149
147
150 c.repo_commits = h.RepoPage(
148 c.repo_commits = h.RepoPage(
151 collection, page=p, items_per_page=size, url=url_generator)
149 collection, page=p, items_per_page=size, url=url_generator)
152 page_ids = [x.raw_id for x in c.repo_commits]
150 page_ids = [x.raw_id for x in c.repo_commits]
153 c.comments = self.db_repo.get_comments(page_ids)
151 c.comments = self.db_repo.get_comments(page_ids)
154 c.statuses = self.db_repo.statuses(page_ids)
152 c.statuses = self.db_repo.statuses(page_ids)
155
153
156 @LoginRequired()
154 @LoginRequired()
157 @HasRepoPermissionAnyDecorator(
155 @HasRepoPermissionAnyDecorator(
158 'repository.read', 'repository.write', 'repository.admin')
156 'repository.read', 'repository.write', 'repository.admin')
159 @view_config(
157 @view_config(
160 route_name='repo_summary_commits', request_method='GET',
158 route_name='repo_summary_commits', request_method='GET',
161 renderer='rhodecode:templates/summary/summary_commits.mako')
159 renderer='rhodecode:templates/summary/summary_commits.mako')
162 def summary_commits(self):
160 def summary_commits(self):
163 c = self.load_default_context()
161 c = self.load_default_context()
164 self._load_commits_context(c)
162 self._load_commits_context(c)
165 return self._get_template_context(c)
163 return self._get_template_context(c)
166
164
167 @LoginRequired()
165 @LoginRequired()
168 @HasRepoPermissionAnyDecorator(
166 @HasRepoPermissionAnyDecorator(
169 'repository.read', 'repository.write', 'repository.admin')
167 'repository.read', 'repository.write', 'repository.admin')
170 @view_config(
168 @view_config(
171 route_name='repo_summary', request_method='GET',
169 route_name='repo_summary', request_method='GET',
172 renderer='rhodecode:templates/summary/summary.mako')
170 renderer='rhodecode:templates/summary/summary.mako')
173 @view_config(
171 @view_config(
174 route_name='repo_summary_slash', request_method='GET',
172 route_name='repo_summary_slash', request_method='GET',
175 renderer='rhodecode:templates/summary/summary.mako')
173 renderer='rhodecode:templates/summary/summary.mako')
176 @view_config(
174 @view_config(
177 route_name='repo_summary_explicit', request_method='GET',
175 route_name='repo_summary_explicit', request_method='GET',
178 renderer='rhodecode:templates/summary/summary.mako')
176 renderer='rhodecode:templates/summary/summary.mako')
179 def summary(self):
177 def summary(self):
180 c = self.load_default_context()
178 c = self.load_default_context()
181
179
182 # Prepare the clone URL
180 # Prepare the clone URL
183 username = ''
181 username = ''
184 if self._rhodecode_user.username != User.DEFAULT_USER:
182 if self._rhodecode_user.username != User.DEFAULT_USER:
185 username = safe_str(self._rhodecode_user.username)
183 username = safe_str(self._rhodecode_user.username)
186
184
187 _def_clone_uri = _def_clone_uri_id = c.clone_uri_tmpl
185 _def_clone_uri = _def_clone_uri_id = c.clone_uri_tmpl
188 _def_clone_uri_ssh = c.clone_uri_ssh_tmpl
186 _def_clone_uri_ssh = c.clone_uri_ssh_tmpl
189
187
190 if '{repo}' in _def_clone_uri:
188 if '{repo}' in _def_clone_uri:
191 _def_clone_uri_id = _def_clone_uri.replace(
189 _def_clone_uri_id = _def_clone_uri.replace(
192 '{repo}', '_{repoid}')
190 '{repo}', '_{repoid}')
193 elif '{repoid}' in _def_clone_uri:
191 elif '{repoid}' in _def_clone_uri:
194 _def_clone_uri_id = _def_clone_uri.replace(
192 _def_clone_uri_id = _def_clone_uri.replace(
195 '_{repoid}', '{repo}')
193 '_{repoid}', '{repo}')
196
194
197 c.clone_repo_url = self.db_repo.clone_url(
195 c.clone_repo_url = self.db_repo.clone_url(
198 user=username, uri_tmpl=_def_clone_uri)
196 user=username, uri_tmpl=_def_clone_uri)
199 c.clone_repo_url_id = self.db_repo.clone_url(
197 c.clone_repo_url_id = self.db_repo.clone_url(
200 user=username, uri_tmpl=_def_clone_uri_id)
198 user=username, uri_tmpl=_def_clone_uri_id)
201 c.clone_repo_url_ssh = self.db_repo.clone_url(
199 c.clone_repo_url_ssh = self.db_repo.clone_url(
202 uri_tmpl=_def_clone_uri_ssh, ssh=True)
200 uri_tmpl=_def_clone_uri_ssh, ssh=True)
203
201
204 # If enabled, get statistics data
202 # If enabled, get statistics data
205
203
206 c.show_stats = bool(self.db_repo.enable_statistics)
204 c.show_stats = bool(self.db_repo.enable_statistics)
207
205
208 stats = Session().query(Statistics) \
206 stats = Session().query(Statistics) \
209 .filter(Statistics.repository == self.db_repo) \
207 .filter(Statistics.repository == self.db_repo) \
210 .scalar()
208 .scalar()
211
209
212 c.stats_percentage = 0
210 c.stats_percentage = 0
213
211
214 if stats and stats.languages:
212 if stats and stats.languages:
215 c.no_data = False is self.db_repo.enable_statistics
213 c.no_data = False is self.db_repo.enable_statistics
216 lang_stats_d = json.loads(stats.languages)
214 lang_stats_d = json.loads(stats.languages)
217
215
218 # Sort first by decreasing count and second by the file extension,
216 # Sort first by decreasing count and second by the file extension,
219 # so we have a consistent output.
217 # so we have a consistent output.
220 lang_stats_items = sorted(lang_stats_d.iteritems(),
218 lang_stats_items = sorted(lang_stats_d.iteritems(),
221 key=lambda k: (-k[1], k[0]))[:10]
219 key=lambda k: (-k[1], k[0]))[:10]
222 lang_stats = [(x, {"count": y,
220 lang_stats = [(x, {"count": y,
223 "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
221 "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
224 for x, y in lang_stats_items]
222 for x, y in lang_stats_items]
225
223
226 c.trending_languages = json.dumps(lang_stats)
224 c.trending_languages = json.dumps(lang_stats)
227 else:
225 else:
228 c.no_data = True
226 c.no_data = True
229 c.trending_languages = json.dumps({})
227 c.trending_languages = json.dumps({})
230
228
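A small worked example of the sort used above: descending by count first, then by extension name, so equal counts come out in a stable alphabetical order before the list is truncated to the top ten.

```python
lang_stats_d = {'py': 120, 'js': 45, 'ini': 45, 'mako': 30}
lang_stats_items = sorted(lang_stats_d.items(),
                          key=lambda k: (-k[1], k[0]))[:10]
# -> [('py', 120), ('ini', 45), ('js', 45), ('mako', 30)]
```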
231 scm_model = ScmModel()
229 scm_model = ScmModel()
232 c.enable_downloads = self.db_repo.enable_downloads
230 c.enable_downloads = self.db_repo.enable_downloads
233 c.repository_followers = scm_model.get_followers(self.db_repo)
231 c.repository_followers = scm_model.get_followers(self.db_repo)
234 c.repository_forks = scm_model.get_forks(self.db_repo)
232 c.repository_forks = scm_model.get_forks(self.db_repo)
235 c.repository_is_user_following = scm_model.is_following_repo(
233 c.repository_is_user_following = scm_model.is_following_repo(
236 self.db_repo_name, self._rhodecode_user.user_id)
234 self.db_repo_name, self._rhodecode_user.user_id)
237
235
238 # first interaction with the VCS instance after here...
236 # first interaction with the VCS instance after here...
239 if c.repository_requirements_missing:
237 if c.repository_requirements_missing:
240 self.request.override_renderer = \
238 self.request.override_renderer = \
241 'rhodecode:templates/summary/missing_requirements.mako'
239 'rhodecode:templates/summary/missing_requirements.mako'
242 return self._get_template_context(c)
240 return self._get_template_context(c)
243
241
244 c.readme_data, c.readme_file = \
242 c.readme_data, c.readme_file = \
245 self._get_readme_data(self.db_repo, c.visual.default_renderer)
243 self._get_readme_data(self.db_repo, c.visual.default_renderer)
246
244
247 # loads the summary commits template context
245 # loads the summary commits template context
248 self._load_commits_context(c)
246 self._load_commits_context(c)
249
247
250 return self._get_template_context(c)
248 return self._get_template_context(c)
251
249
252 def get_request_commit_id(self):
250 def get_request_commit_id(self):
253 return self.request.matchdict['commit_id']
251 return self.request.matchdict['commit_id']
254
252
255 @LoginRequired()
253 @LoginRequired()
256 @HasRepoPermissionAnyDecorator(
254 @HasRepoPermissionAnyDecorator(
257 'repository.read', 'repository.write', 'repository.admin')
255 'repository.read', 'repository.write', 'repository.admin')
258 @view_config(
256 @view_config(
259 route_name='repo_stats', request_method='GET',
257 route_name='repo_stats', request_method='GET',
260 renderer='json_ext')
258 renderer='json_ext')
261 def repo_stats(self):
259 def repo_stats(self):
262 commit_id = self.get_request_commit_id()
260 commit_id = self.get_request_commit_id()
263 show_stats = bool(self.db_repo.enable_statistics)
261 show_stats = bool(self.db_repo.enable_statistics)
264 repo_id = self.db_repo.repo_id
262 repo_id = self.db_repo.repo_id
265
263
266 cache_seconds = safe_int(
264 cache_seconds = safe_int(
267 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
265 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
268 cache_on = cache_seconds > 0
266 cache_on = cache_seconds > 0
269 log.debug(
267 log.debug(
270 'Computing REPO TREE for repo_id %s commit_id `%s` '
268 'Computing REPO TREE for repo_id %s commit_id `%s` '
271 'with caching: %s[TTL: %ss]' % (
269 'with caching: %s[TTL: %ss]' % (
272 repo_id, commit_id, cache_on, cache_seconds or 0))
270 repo_id, commit_id, cache_on, cache_seconds or 0))
273
271
274 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
272 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
275 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
273 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
276
274
277 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
275 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
278 condition=cache_on)
276 condition=cache_on)
279 def compute_stats(repo_id, commit_id, show_stats):
277 def compute_stats(repo_id, commit_id, show_stats):
280 code_stats = {}
278 code_stats = {}
281 size = 0
279 size = 0
282 try:
280 try:
283 scm_instance = self.db_repo.scm_instance()
281 scm_instance = self.db_repo.scm_instance()
284 commit = scm_instance.get_commit(commit_id)
282 commit = scm_instance.get_commit(commit_id)
285
283
286 for node in commit.get_filenodes_generator():
284 for node in commit.get_filenodes_generator():
287 size += node.size
285 size += node.size
288 if not show_stats:
286 if not show_stats:
289 continue
287 continue
290 ext = string.lower(node.extension)
288 ext = string.lower(node.extension)
291 ext_info = LANGUAGES_EXTENSIONS_MAP.get(ext)
289 ext_info = LANGUAGES_EXTENSIONS_MAP.get(ext)
292 if ext_info:
290 if ext_info:
293 if ext in code_stats:
291 if ext in code_stats:
294 code_stats[ext]['count'] += 1
292 code_stats[ext]['count'] += 1
295 else:
293 else:
296 code_stats[ext] = {"count": 1, "desc": ext_info}
294 code_stats[ext] = {"count": 1, "desc": ext_info}
297 except (EmptyRepositoryError, CommitDoesNotExistError):
295 except (EmptyRepositoryError, CommitDoesNotExistError):
298 pass
296 pass
299 return {'size': h.format_byte_size_binary(size),
297 return {'size': h.format_byte_size_binary(size),
300 'code_stats': code_stats}
298 'code_stats': code_stats}
301
299
302 stats = compute_stats(self.db_repo.repo_id, commit_id, show_stats)
300 stats = compute_stats(self.db_repo.repo_id, commit_id, show_stats)
303 return stats
301 return stats
304
302
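An illustrative tally mirroring what `compute_stats()` does above, with made-up file data in place of `commit.get_filenodes_generator()`; in the real code the `desc` values come from `LANGUAGES_EXTENSIONS_MAP` and the size is formatted with `h.format_byte_size_binary`.

```python
# (name, extension, size in bytes) stand-ins for commit file nodes
sample_nodes = [('setup.py', 'py', 1200), ('app.js', 'js', 800),
                ('views.py', 'py', 5400)]

code_stats, size = {}, 0
for _name, ext, node_size in sample_nodes:
    size += node_size
    entry = code_stats.setdefault(ext, {'count': 0, 'desc': ext})
    entry['count'] += 1

# code_stats -> {'py': {'count': 2, 'desc': 'py'}, 'js': {'count': 1, 'desc': 'js'}}
# repo_stats() then returns {'size': '<human readable>', 'code_stats': code_stats}
```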
305 @LoginRequired()
303 @LoginRequired()
306 @HasRepoPermissionAnyDecorator(
304 @HasRepoPermissionAnyDecorator(
307 'repository.read', 'repository.write', 'repository.admin')
305 'repository.read', 'repository.write', 'repository.admin')
308 @view_config(
306 @view_config(
309 route_name='repo_refs_data', request_method='GET',
307 route_name='repo_refs_data', request_method='GET',
310 renderer='json_ext')
308 renderer='json_ext')
311 def repo_refs_data(self):
309 def repo_refs_data(self):
312 _ = self.request.translate
310 _ = self.request.translate
313 self.load_default_context()
311 self.load_default_context()
314
312
315 repo = self.rhodecode_vcs_repo
313 repo = self.rhodecode_vcs_repo
316 refs_to_create = [
314 refs_to_create = [
317 (_("Branch"), repo.branches, 'branch'),
315 (_("Branch"), repo.branches, 'branch'),
318 (_("Tag"), repo.tags, 'tag'),
316 (_("Tag"), repo.tags, 'tag'),
319 (_("Bookmark"), repo.bookmarks, 'book'),
317 (_("Bookmark"), repo.bookmarks, 'book'),
320 ]
318 ]
321 res = self._create_reference_data(
319 res = self._create_reference_data(
322 repo, self.db_repo_name, refs_to_create)
320 repo, self.db_repo_name, refs_to_create)
323 data = {
321 data = {
324 'more': False,
322 'more': False,
325 'results': res
323 'results': res
326 }
324 }
327 return data
325 return data
328
326
329 @LoginRequired()
327 @LoginRequired()
330 @HasRepoPermissionAnyDecorator(
328 @HasRepoPermissionAnyDecorator(
331 'repository.read', 'repository.write', 'repository.admin')
329 'repository.read', 'repository.write', 'repository.admin')
332 @view_config(
330 @view_config(
333 route_name='repo_refs_changelog_data', request_method='GET',
331 route_name='repo_refs_changelog_data', request_method='GET',
334 renderer='json_ext')
332 renderer='json_ext')
335 def repo_refs_changelog_data(self):
333 def repo_refs_changelog_data(self):
336 _ = self.request.translate
334 _ = self.request.translate
337 self.load_default_context()
335 self.load_default_context()
338
336
339 repo = self.rhodecode_vcs_repo
337 repo = self.rhodecode_vcs_repo
340
338
341 refs_to_create = [
339 refs_to_create = [
342 (_("Branches"), repo.branches, 'branch'),
340 (_("Branches"), repo.branches, 'branch'),
343 (_("Closed branches"), repo.branches_closed, 'branch_closed'),
341 (_("Closed branches"), repo.branches_closed, 'branch_closed'),
344 # TODO: enable when vcs can handle bookmarks filters
342 # TODO: enable when vcs can handle bookmarks filters
345 # (_("Bookmarks"), repo.bookmarks, "book"),
343 # (_("Bookmarks"), repo.bookmarks, "book"),
346 ]
344 ]
347 res = self._create_reference_data(
345 res = self._create_reference_data(
348 repo, self.db_repo_name, refs_to_create)
346 repo, self.db_repo_name, refs_to_create)
349 data = {
347 data = {
350 'more': False,
348 'more': False,
351 'results': res
349 'results': res
352 }
350 }
353 return data
351 return data
354
352
355 def _create_reference_data(self, repo, full_repo_name, refs_to_create):
353 def _create_reference_data(self, repo, full_repo_name, refs_to_create):
356 format_ref_id = utils.get_format_ref_id(repo)
354 format_ref_id = utils.get_format_ref_id(repo)
357
355
358 result = []
356 result = []
359 for title, refs, ref_type in refs_to_create:
357 for title, refs, ref_type in refs_to_create:
360 if refs:
358 if refs:
361 result.append({
359 result.append({
362 'text': title,
360 'text': title,
363 'children': self._create_reference_items(
361 'children': self._create_reference_items(
364 repo, full_repo_name, refs, ref_type,
362 repo, full_repo_name, refs, ref_type,
365 format_ref_id),
363 format_ref_id),
366 })
364 })
367 return result
365 return result
368
366
369 def _create_reference_items(self, repo, full_repo_name, refs, ref_type,
367 def _create_reference_items(self, repo, full_repo_name, refs, ref_type,
370 format_ref_id):
368 format_ref_id):
371 result = []
369 result = []
372 is_svn = h.is_svn(repo)
370 is_svn = h.is_svn(repo)
373 for ref_name, raw_id in refs.iteritems():
371 for ref_name, raw_id in refs.iteritems():
374 files_url = self._create_files_url(
372 files_url = self._create_files_url(
375 repo, full_repo_name, ref_name, raw_id, is_svn)
373 repo, full_repo_name, ref_name, raw_id, is_svn)
376 result.append({
374 result.append({
377 'text': ref_name,
375 'text': ref_name,
378 'id': format_ref_id(ref_name, raw_id),
376 'id': format_ref_id(ref_name, raw_id),
379 'raw_id': raw_id,
377 'raw_id': raw_id,
380 'type': ref_type,
378 'type': ref_type,
381 'files_url': files_url,
379 'files_url': files_url,
382 })
380 })
383 return result
381 return result
384
382
385 def _create_files_url(self, repo, full_repo_name, ref_name, raw_id, is_svn):
383 def _create_files_url(self, repo, full_repo_name, ref_name, raw_id, is_svn):
386 use_commit_id = '/' in ref_name or is_svn
384 use_commit_id = '/' in ref_name or is_svn
387 return h.route_path(
385 return h.route_path(
388 'repo_files',
386 'repo_files',
389 repo_name=full_repo_name,
387 repo_name=full_repo_name,
390 f_path=ref_name if is_svn else '',
388 f_path=ref_name if is_svn else '',
391 commit_id=raw_id if use_commit_id else ref_name,
389 commit_id=raw_id if use_commit_id else ref_name,
392 _query=dict(at=ref_name))
390 _query=dict(at=ref_name))
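For orientation, an illustrative shape of the payload `repo_refs_data()` returns, built from the fields set in `_create_reference_data()` / `_create_reference_items()` above; the ref names, hashes and URL are made up, and the `id` format depends on `format_ref_id` for the backend.

```python
data = {
    'more': False,
    'results': [
        {'text': 'Branch',
         'children': [
             {'text': 'default',
              'id': 'default:1a2b3c4d',           # format_ref_id(ref_name, raw_id)
              'raw_id': '1a2b3c4d',
              'type': 'branch',
              'files_url': '/example-repo/files/default/?at=default'},
         ]},
    ],
}
```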
@@ -1,319 +1,320 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2015-2018 RhodeCode GmbH
3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import os
20 import os
21 import time
21 import time
22 import logging
22 import logging
23 import functools
23 import functools
24 import threading
24 import threading
25
25
26 from dogpile.cache import CacheRegion
26 from dogpile.cache import CacheRegion
27 from dogpile.cache.util import compat
27 from dogpile.cache.util import compat
28
28
29 import rhodecode
29 import rhodecode
30 from rhodecode.lib.utils import safe_str, sha1
30 from rhodecode.lib.utils import safe_str, sha1
31 from rhodecode.lib.utils2 import safe_unicode, str2bool
31 from rhodecode.lib.utils2 import safe_unicode, str2bool
32 from rhodecode.model.db import Session, CacheKey, IntegrityError
32 from rhodecode.model.db import Session, CacheKey, IntegrityError
33
33
34 from . import region_meta
34 from . import region_meta
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38
38
39 class RhodeCodeCacheRegion(CacheRegion):
39 class RhodeCodeCacheRegion(CacheRegion):
40
40
41 def conditional_cache_on_arguments(
41 def conditional_cache_on_arguments(
42 self, namespace=None,
42 self, namespace=None,
43 expiration_time=None,
43 expiration_time=None,
44 should_cache_fn=None,
44 should_cache_fn=None,
45 to_str=compat.string_type,
45 to_str=compat.string_type,
46 function_key_generator=None,
46 function_key_generator=None,
47 condition=True):
47 condition=True):
48 """
48 """
49 Custom conditional decorator, that will not touch any dogpile internals if
49 Custom conditional decorator, that will not touch any dogpile internals if
50 condition isn't meet. This works a bit different than should_cache_fn
50 condition isn't meet. This works a bit different than should_cache_fn
51 And it's faster in cases we don't ever want to compute cached values
51 And it's faster in cases we don't ever want to compute cached values
52 """
52 """
53 expiration_time_is_callable = compat.callable(expiration_time)
53 expiration_time_is_callable = compat.callable(expiration_time)
54
54
55 if function_key_generator is None:
55 if function_key_generator is None:
56 function_key_generator = self.function_key_generator
56 function_key_generator = self.function_key_generator
57
57
58 def decorator(fn):
58 def decorator(fn):
59 if to_str is compat.string_type:
59 if to_str is compat.string_type:
60 # backwards compatible
60 # backwards compatible
61 key_generator = function_key_generator(namespace, fn)
61 key_generator = function_key_generator(namespace, fn)
62 else:
62 else:
63 key_generator = function_key_generator(namespace, fn, to_str=to_str)
63 key_generator = function_key_generator(namespace, fn, to_str=to_str)
64
64
65 @functools.wraps(fn)
65 @functools.wraps(fn)
66 def decorate(*arg, **kw):
66 def decorate(*arg, **kw):
67 key = key_generator(*arg, **kw)
67 key = key_generator(*arg, **kw)
68
68
69 @functools.wraps(fn)
69 @functools.wraps(fn)
70 def creator():
70 def creator():
71 return fn(*arg, **kw)
71 return fn(*arg, **kw)
72
72
73 if not condition:
73 if not condition:
74 return creator()
74 return creator()
75
75
76 timeout = expiration_time() if expiration_time_is_callable \
76 timeout = expiration_time() if expiration_time_is_callable \
77 else expiration_time
77 else expiration_time
78
78
79 return self.get_or_create(key, creator, timeout, should_cache_fn)
79 return self.get_or_create(key, creator, timeout, should_cache_fn)
80
80
81 def invalidate(*arg, **kw):
81 def invalidate(*arg, **kw):
82 key = key_generator(*arg, **kw)
82 key = key_generator(*arg, **kw)
83 self.delete(key)
83 self.delete(key)
84
84
85 def set_(value, *arg, **kw):
85 def set_(value, *arg, **kw):
86 key = key_generator(*arg, **kw)
86 key = key_generator(*arg, **kw)
87 self.set(key, value)
87 self.set(key, value)
88
88
89 def get(*arg, **kw):
89 def get(*arg, **kw):
90 key = key_generator(*arg, **kw)
90 key = key_generator(*arg, **kw)
91 return self.get(key)
91 return self.get(key)
92
92
93 def refresh(*arg, **kw):
93 def refresh(*arg, **kw):
94 key = key_generator(*arg, **kw)
94 key = key_generator(*arg, **kw)
95 value = fn(*arg, **kw)
95 value = fn(*arg, **kw)
96 self.set(key, value)
96 self.set(key, value)
97 return value
97 return value
98
98
99 decorate.set = set_
99 decorate.set = set_
100 decorate.invalidate = invalidate
100 decorate.invalidate = invalidate
101 decorate.refresh = refresh
101 decorate.refresh = refresh
102 decorate.get = get
102 decorate.get = get
103 decorate.original = fn
103 decorate.original = fn
104 decorate.key_generator = key_generator
104 decorate.key_generator = key_generator
105
105
106 return decorate
106 return decorate
107
107
108 return decorator
108 return decorator
109
109
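A hedged usage sketch of the helpers attached by `conditional_cache_on_arguments` above (`.get`, `.set`, `.invalidate`, `.refresh`, `.original`); the region name mirrors the call sites earlier in this commit, the namespace and function are made up, and it assumes the `cache_repo_longterm` region is configured in the running application.

```python
from rhodecode.lib import rc_cache

region = rc_cache.get_or_create_region(
    'cache_repo_longterm', 'cache_repo_instance.1_README')


@region.conditional_cache_on_arguments(namespace='cache_repo_instance.1_README')
def render_readme(repo_id, renderer_type):
    return 'expensive result for %s/%s' % (repo_id, renderer_type)


value = render_readme(1, 'rst')           # get-or-create through dogpile
cached = render_readme.get(1, 'rst')      # read the cached value only
fresh = render_readme.refresh(1, 'rst')   # force a re-compute and store it
render_readme.invalidate(1, 'rst')        # delete the cached key
raw = render_readme.original(1, 'rst')    # call the undecorated function
```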
110
110
111 def make_region(*arg, **kw):
111 def make_region(*arg, **kw):
112 return RhodeCodeCacheRegion(*arg, **kw)
112 return RhodeCodeCacheRegion(*arg, **kw)
113
113
114
114
115 def get_default_cache_settings(settings, prefixes=None):
115 def get_default_cache_settings(settings, prefixes=None):
116 prefixes = prefixes or []
116 prefixes = prefixes or []
117 cache_settings = {}
117 cache_settings = {}
118 for key in settings.keys():
118 for key in settings.keys():
119 for prefix in prefixes:
119 for prefix in prefixes:
120 if key.startswith(prefix):
120 if key.startswith(prefix):
121 name = key.split(prefix)[1].strip()
121 name = key.split(prefix)[1].strip()
122 val = settings[key]
122 val = settings[key]
123 if isinstance(val, basestring):
123 if isinstance(val, basestring):
124 val = val.strip()
124 val = val.strip()
125 cache_settings[name] = val
125 cache_settings[name] = val
126 return cache_settings
126 return cache_settings
127
127
128
128
129 def compute_key_from_params(*args):
129 def compute_key_from_params(*args):
130 """
130 """
131 Helper to compute key from given params to be used in cache manager
131 Helper to compute key from given params to be used in cache manager
132 """
132 """
133 return sha1("_".join(map(safe_str, args)))
133 return sha1("_".join(map(safe_str, args)))
134
134
135
135
136 def key_generator(namespace, fn):
136 def key_generator(namespace, fn):
137 fname = fn.__name__
137 fname = fn.__name__
138
138
139 def generate_key(*args):
139 def generate_key(*args):
140 namespace_pref = namespace or 'default'
140 namespace_pref = namespace or 'default'
141 arg_key = compute_key_from_params(*args)
141 arg_key = compute_key_from_params(*args)
142 final_key = "{}:{}_{}".format(namespace_pref, fname, arg_key)
142 final_key = "{}:{}_{}".format(namespace_pref, fname, arg_key)
143
143
144 return final_key
144 return final_key
145
145
146 return generate_key
146 return generate_key
147
147
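A worked example of the cache key shape produced by `generate_key()` above, approximating RhodeCode's `safe_str`/`sha1` with `str` and `hashlib`; the resulting format is `<namespace>:<function name>_<sha1 of the joined args>`.

```python
import hashlib

namespace = 'cache_repo_instance.1_README'
args = (1, 'example-repo', 'rst')

arg_key = hashlib.sha1('_'.join(map(str, args)).encode('utf8')).hexdigest()
final_key = '{}:{}_{}'.format(namespace, 'generate_repo_readme', arg_key)
# -> 'cache_repo_instance.1_README:generate_repo_readme_<40-char sha1 digest>'
```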
148
148
149 def get_or_create_region(region_name, region_namespace=None):
149 def get_or_create_region(region_name, region_namespace=None):
150 from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
150 from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
151 region_obj = region_meta.dogpile_cache_regions.get(region_name)
151 region_obj = region_meta.dogpile_cache_regions.get(region_name)
152 if not region_obj:
152 if not region_obj:
153 raise EnvironmentError(
153 raise EnvironmentError(
154 'Region `{}` not in configured: {}.'.format(
154 'Region `{}` not in configured: {}.'.format(
155 region_name, region_meta.dogpile_cache_regions.keys()))
155 region_name, region_meta.dogpile_cache_regions.keys()))
156
156
157 region_uid_name = '{}:{}'.format(region_name, region_namespace)
157 region_uid_name = '{}:{}'.format(region_name, region_namespace)
158 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
158 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
159 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
159 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
160 if region_exist:
160 if region_exist:
161 log.debug('Using already configured region: %s', region_namespace)
161 log.debug('Using already configured region: %s', region_namespace)
162 return region_exist
162 return region_exist
163 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
163 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
164 expiration_time = region_obj.expiration_time
164 expiration_time = region_obj.expiration_time
165
165
166 if not os.path.isdir(cache_dir):
166 if not os.path.isdir(cache_dir):
167 os.makedirs(cache_dir)
167 os.makedirs(cache_dir)
168 new_region = make_region(
168 new_region = make_region(
169 name=region_uid_name, function_key_generator=key_generator
169 name=region_uid_name, function_key_generator=key_generator
170 )
170 )
171 namespace_filename = os.path.join(
171 namespace_filename = os.path.join(
172 cache_dir, "{}.cache.dbm".format(region_namespace))
172 cache_dir, "{}.cache.dbm".format(region_namespace))
173 # special type that allows 1db per namespace
173 # special type that allows 1db per namespace
174 new_region.configure(
174 new_region.configure(
175 backend='dogpile.cache.rc.file_namespace',
175 backend='dogpile.cache.rc.file_namespace',
176 expiration_time=expiration_time,
176 expiration_time=expiration_time,
177 arguments={"filename": namespace_filename}
177 arguments={"filename": namespace_filename}
178 )
178 )
179
179
180 # create and save in region caches
180 # create and save in region caches
181 log.debug('configuring new region: %s',region_uid_name)
181 log.debug('configuring new region: %s',region_uid_name)
182 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
182 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
183
183
184 return region_obj
184 return region_obj
185
185
186
186
187 def clear_cache_namespace(cache_region, cache_namespace_uid):
187 def clear_cache_namespace(cache_region, cache_namespace_uid):
188 region = get_or_create_region(cache_region, cache_namespace_uid)
188 region = get_or_create_region(cache_region, cache_namespace_uid)
189 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
189 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
190 region.delete_multi(cache_keys)
190 region.delete_multi(cache_keys)
191 return len(cache_keys)
191 return len(cache_keys)
192
192
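A hedged usage sketch of `clear_cache_namespace()` above: drop every key the feed cache of one repository has stored. The namespace value mirrors the call sites earlier in this commit; the repo id is made up.

```python
from rhodecode.model.db import CacheKey

repo_id = 1  # made-up repository id
cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
    repo_id, CacheKey.CACHE_TYPE_FEED)
removed = clear_cache_namespace('cache_repo_longterm', cache_namespace_uid)
# removed -> number of cache keys deleted from that namespace
```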
193
193
194 class ActiveRegionCache(object):
194 class ActiveRegionCache(object):
195 def __init__(self, context):
195 def __init__(self, context):
196 self.context = context
196 self.context = context
197
197
198 def should_invalidate(self):
198 def should_invalidate(self):
199 return False
199 return False
200
200
201
201
202 class FreshRegionCache(object):
202 class FreshRegionCache(object):
203 def __init__(self, context):
203 def __init__(self, context):
204 self.context = context
204 self.context = context
205
205
206 def should_invalidate(self):
206 def should_invalidate(self):
207 return True
207 return True
208
208
209
209
210 class InvalidationContext(object):
210 class InvalidationContext(object):
211 """
211 """
212 usage::
212 usage::
213
213
214 from rhodecode.lib import rc_cache
214 from rhodecode.lib import rc_cache
215
215
216 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
216 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
217 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
217 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
218
218
219 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
219 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
220 def heavy_compute(cache_name, param1, param2):
220 def heavy_compute(cache_name, param1, param2):
221 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
221 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
222
222
223 # the invalidation namespace is a shared namespace key for all process caches;
223 # the invalidation namespace is a shared namespace key for all process caches;
224 # we use it to send a global invalidation signal
224 # we use it to send a global invalidation signal
225 invalidation_namespace = 'repo_cache:1'
225 invalidation_namespace = 'repo_cache:1'
226
226
227 inv_context_manager = rc_cache.InvalidationContext(
227 inv_context_manager = rc_cache.InvalidationContext(
228 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
228 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
229 with inv_context_manager as invalidation_context:
229 with inv_context_manager as invalidation_context:
230 # check for stored invalidation signal, and maybe purge the cache
230 args = ('one', 'two')
231 # before computing it again
231 # re-compute and store the cache if we get an invalidation signal
232 if invalidation_context.should_invalidate():
232 if invalidation_context.should_invalidate():
233 heavy_compute.invalidate('some_name', 'param1', 'param2')
233 result = heavy_compute.refresh(*args)
234 else:
235 result = heavy_compute(*args)
234
236
235 result = heavy_compute('some_name', 'param1', 'param2')
236 compute_time = inv_context_manager.compute_time
237 compute_time = inv_context_manager.compute_time
237 print(compute_time)
238 log.debug('result computed in %.3fs', compute_time)
238
239
239 # To send global invalidation signal, simply run
240 # To send global invalidation signal, simply run
240 CacheKey.set_invalidate(invalidation_namespace)
241 CacheKey.set_invalidate(invalidation_namespace)
241
242
242 """
243 """
243
244
244 def __repr__(self):
245 def __repr__(self):
245 return '<InvalidationContext:{}[{}]>'.format(
246 return '<InvalidationContext:{}[{}]>'.format(
246 safe_str(self.cache_key), safe_str(self.uid))
247 safe_str(self.cache_key), safe_str(self.uid))
247
248
248 def __init__(self, uid, invalidation_namespace='',
249 def __init__(self, uid, invalidation_namespace='',
249 raise_exception=False, thread_scoped=None):
250 raise_exception=False, thread_scoped=None):
250 self.uid = uid
251 self.uid = uid
251 self.invalidation_namespace = invalidation_namespace
252 self.invalidation_namespace = invalidation_namespace
252 self.raise_exception = raise_exception
253 self.raise_exception = raise_exception
253 self.proc_id = safe_unicode(rhodecode.CONFIG.get('instance_id') or 'DEFAULT')
254 self.proc_id = safe_unicode(rhodecode.CONFIG.get('instance_id') or 'DEFAULT')
254 self.thread_id = 'global'
255 self.thread_id = 'global'
255
256
256 if thread_scoped is None:
257 if thread_scoped is None:
257 # when left at the default (None), this can be overridden via .ini settings
258 # when left at the default (None), this can be overridden via .ini settings
258 thread_scoped = str2bool(rhodecode.CONFIG.get('cache_thread_scoped'))
259 thread_scoped = str2bool(rhodecode.CONFIG.get('cache_thread_scoped'))
259
260
260 # Append the thread id to the cache key if this invalidation context
261 # Append the thread id to the cache key if this invalidation context
261 # should be scoped to the current thread.
262 # should be scoped to the current thread.
262 if thread_scoped is True:
263 if thread_scoped is True:
263 self.thread_id = threading.current_thread().ident
264 self.thread_id = threading.current_thread().ident
264
265
265 self.cache_key = compute_key_from_params(uid)
266 self.cache_key = compute_key_from_params(uid)
266 self.cache_key = 'proc:{}_thread:{}_{}'.format(
267 self.cache_key = 'proc:{}_thread:{}_{}'.format(
267 self.proc_id, self.thread_id, self.cache_key)
268 self.proc_id, self.thread_id, self.cache_key)
268 self.compute_time = 0
269 self.compute_time = 0
269
270
270 def get_or_create_cache_obj(self, uid, invalidation_namespace=''):
271 def get_or_create_cache_obj(self, uid, invalidation_namespace=''):
271 cache_obj = CacheKey.get_active_cache(self.cache_key)
272 cache_obj = CacheKey.get_active_cache(self.cache_key)
272 log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
273 log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
273 invalidation_namespace = invalidation_namespace or self.invalidation_namespace
274 invalidation_namespace = invalidation_namespace or self.invalidation_namespace
274 if not cache_obj:
275 if not cache_obj:
275 cache_obj = CacheKey(self.cache_key, cache_args=invalidation_namespace)
276 cache_obj = CacheKey(self.cache_key, cache_args=invalidation_namespace)
276 return cache_obj
277 return cache_obj
277
278
278 def __enter__(self):
279 def __enter__(self):
279 """
280 """
280 Test if the current cache object is valid, and return the region cache marker
281 Test if the current cache object is valid, and return the region cache marker
281 that tells the caller whether to invalidate and re-compute
282 that tells the caller whether to invalidate and re-compute
282 """
283 """
283 # register or get a new key based on uid
284 # register or get a new key based on uid
284 self.cache_obj = self.get_or_create_cache_obj(uid=self.uid)
285 self.cache_obj = self.get_or_create_cache_obj(uid=self.uid)
285 self._start_time = time.time()
286 self._start_time = time.time()
286 if self.cache_obj.cache_active:
287 if self.cache_obj.cache_active:
287 # means our cache obj exists and is marked as active, i.e. its
288 # means our cache obj exists and is marked as active, i.e. its
288 # cache is not outdated, so we return ActiveRegionCache
289 # cache is not outdated, so we return ActiveRegionCache
289 self.skip_cache_active_change = True
290 self.skip_cache_active_change = True
290
291
291 return ActiveRegionCache(context=self)
292 return ActiveRegionCache(context=self)
292
293
293 # the key either does not exist or is set to False, so we return
294 # the key either does not exist or is set to False, so we return
294 # the real invalidator which re-computes the value. We additionally set
295 # the real invalidator which re-computes the value. We additionally set
295 # the flag to actually update the database objects
296 # the flag to actually update the database objects
296 self.skip_cache_active_change = False
297 self.skip_cache_active_change = False
297 return FreshRegionCache(context=self)
298 return FreshRegionCache(context=self)
298
299
299 def __exit__(self, exc_type, exc_val, exc_tb):
300 def __exit__(self, exc_type, exc_val, exc_tb):
300 # save compute time
301 # save compute time
301 self.compute_time = time.time() - self._start_time
302 self.compute_time = time.time() - self._start_time
302
303
303 if self.skip_cache_active_change:
304 if self.skip_cache_active_change:
304 return
305 return
305
306
306 try:
307 try:
307 self.cache_obj.cache_active = True
308 self.cache_obj.cache_active = True
308 Session().add(self.cache_obj)
309 Session().add(self.cache_obj)
309 Session().commit()
310 Session().commit()
310 except IntegrityError:
311 except IntegrityError:
311 # if we catch integrity error, it means we inserted this object
312 # if we catch integrity error, it means we inserted this object
312 # the assumption is that this is really an edge race-condition case and
313 # the assumption is that this is really an edge race-condition case and
313 # it's safe to skip it
314 # it's safe to skip it
314 Session().rollback()
315 Session().rollback()
315 except Exception:
316 except Exception:
316 log.exception('Failed to commit on cache key update')
317 log.exception('Failed to commit on cache key update')
317 Session().rollback()
318 Session().rollback()
318 if self.raise_exception:
319 if self.raise_exception:
319 raise
320 raise
NO CONTENT: modified file
The requested commit or file is too big and content was truncated.
@@ -1,683 +1,687 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Package for testing various lib/helper functions in rhodecode
23 Package for testing various lib/helper functions in rhodecode
24 """
24 """
25
25
26 import datetime
26 import datetime
27 import string
27 import string
28 import mock
28 import mock
29 import pytest
29 import pytest
30
30
31 from rhodecode.tests import no_newline_id_generator
31 from rhodecode.tests import no_newline_id_generator
32 from rhodecode.tests.utils import run_test_concurrently
32 from rhodecode.tests.utils import run_test_concurrently
33
33
34 from rhodecode.lib import rc_cache
34 from rhodecode.lib import rc_cache
35 from rhodecode.lib.helpers import InitialsGravatar
35 from rhodecode.lib.helpers import InitialsGravatar
36 from rhodecode.lib.utils2 import AttributeDict
36 from rhodecode.lib.utils2 import AttributeDict
37
37
38 from rhodecode.model.db import Repository, CacheKey
38 from rhodecode.model.db import Repository, CacheKey
39
39
40
40
41 def _urls_for_proto(proto):
41 def _urls_for_proto(proto):
42 return [
42 return [
43 ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
43 ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
44 '%s://127.0.0.1' % proto),
44 '%s://127.0.0.1' % proto),
45 ('%s://marcink@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
45 ('%s://marcink@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
46 '%s://127.0.0.1' % proto),
46 '%s://127.0.0.1' % proto),
47 ('%s://marcink:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
47 ('%s://marcink:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
48 '%s://127.0.0.1' % proto),
48 '%s://127.0.0.1' % proto),
49 ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'],
49 ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'],
50 '%s://127.0.0.1:8080' % proto),
50 '%s://127.0.0.1:8080' % proto),
51 ('%s://domain.org' % proto, ['%s://' % proto, 'domain.org'],
51 ('%s://domain.org' % proto, ['%s://' % proto, 'domain.org'],
52 '%s://domain.org' % proto),
52 '%s://domain.org' % proto),
53 ('%s://user:pass@domain.org:8080' % proto,
53 ('%s://user:pass@domain.org:8080' % proto,
54 ['%s://' % proto, 'domain.org', '8080'],
54 ['%s://' % proto, 'domain.org', '8080'],
55 '%s://domain.org:8080' % proto),
55 '%s://domain.org:8080' % proto),
56 ]
56 ]
57
57
58 TEST_URLS = _urls_for_proto('http') + _urls_for_proto('https')
58 TEST_URLS = _urls_for_proto('http') + _urls_for_proto('https')
59
59
60
60
61 @pytest.mark.parametrize("test_url, expected, expected_creds", TEST_URLS)
61 @pytest.mark.parametrize("test_url, expected, expected_creds", TEST_URLS)
62 def test_uri_filter(test_url, expected, expected_creds):
62 def test_uri_filter(test_url, expected, expected_creds):
63 from rhodecode.lib.utils2 import uri_filter
63 from rhodecode.lib.utils2 import uri_filter
64 assert uri_filter(test_url) == expected
64 assert uri_filter(test_url) == expected
65
65
66
66
67 @pytest.mark.parametrize("test_url, expected, expected_creds", TEST_URLS)
67 @pytest.mark.parametrize("test_url, expected, expected_creds", TEST_URLS)
68 def test_credentials_filter(test_url, expected, expected_creds):
68 def test_credentials_filter(test_url, expected, expected_creds):
69 from rhodecode.lib.utils2 import credentials_filter
69 from rhodecode.lib.utils2 import credentials_filter
70 assert credentials_filter(test_url) == expected_creds
70 assert credentials_filter(test_url) == expected_creds
71
71
72
72
73 @pytest.mark.parametrize("str_bool, expected", [
73 @pytest.mark.parametrize("str_bool, expected", [
74 ('t', True),
74 ('t', True),
75 ('true', True),
75 ('true', True),
76 ('y', True),
76 ('y', True),
77 ('yes', True),
77 ('yes', True),
78 ('on', True),
78 ('on', True),
79 ('1', True),
79 ('1', True),
80 ('Y', True),
80 ('Y', True),
81 ('yeS', True),
81 ('yeS', True),
82 ('Y', True),
82 ('Y', True),
83 ('TRUE', True),
83 ('TRUE', True),
84 ('T', True),
84 ('T', True),
85 ('False', False),
85 ('False', False),
86 ('F', False),
86 ('F', False),
87 ('FALSE', False),
87 ('FALSE', False),
88 ('0', False),
88 ('0', False),
89 ('-1', False),
89 ('-1', False),
90 ('', False)
90 ('', False)
91 ])
91 ])
92 def test_str2bool(str_bool, expected):
92 def test_str2bool(str_bool, expected):
93 from rhodecode.lib.utils2 import str2bool
93 from rhodecode.lib.utils2 import str2bool
94 assert str2bool(str_bool) == expected
94 assert str2bool(str_bool) == expected
95
95
96
96
97 @pytest.mark.parametrize("text, expected", reduce(lambda a1,a2:a1+a2, [
97 @pytest.mark.parametrize("text, expected", reduce(lambda a1,a2:a1+a2, [
98 [
98 [
99 (pref+"", []),
99 (pref+"", []),
100 (pref+"Hi there @marcink", ['marcink']),
100 (pref+"Hi there @marcink", ['marcink']),
101 (pref+"Hi there @marcink and @bob", ['bob', 'marcink']),
101 (pref+"Hi there @marcink and @bob", ['bob', 'marcink']),
102 (pref+"Hi there @marcink\n", ['marcink']),
102 (pref+"Hi there @marcink\n", ['marcink']),
103 (pref+"Hi there @marcink and @bob\n", ['bob', 'marcink']),
103 (pref+"Hi there @marcink and @bob\n", ['bob', 'marcink']),
104 (pref+"Hi there marcin@rhodecode.com", []),
104 (pref+"Hi there marcin@rhodecode.com", []),
105 (pref+"Hi there @john.malcovic and @bob\n", ['bob', 'john.malcovic']),
105 (pref+"Hi there @john.malcovic and @bob\n", ['bob', 'john.malcovic']),
106 (pref+"This needs to be reviewed: (@marcink,@john)", ["john", "marcink"]),
106 (pref+"This needs to be reviewed: (@marcink,@john)", ["john", "marcink"]),
107 (pref+"This needs to be reviewed: (@marcink, @john)", ["john", "marcink"]),
107 (pref+"This needs to be reviewed: (@marcink, @john)", ["john", "marcink"]),
108 (pref+"This needs to be reviewed: [@marcink,@john]", ["john", "marcink"]),
108 (pref+"This needs to be reviewed: [@marcink,@john]", ["john", "marcink"]),
109 (pref+"This needs to be reviewed: (@marcink @john)", ["john", "marcink"]),
109 (pref+"This needs to be reviewed: (@marcink @john)", ["john", "marcink"]),
110 (pref+"@john @mary, please review", ["john", "mary"]),
110 (pref+"@john @mary, please review", ["john", "mary"]),
111 (pref+"@john,@mary, please review", ["john", "mary"]),
111 (pref+"@john,@mary, please review", ["john", "mary"]),
112 (pref+"Hej @123, @22john,@mary, please review", ['123', '22john', 'mary']),
112 (pref+"Hej @123, @22john,@mary, please review", ['123', '22john', 'mary']),
113 (pref+"@first hi there @marcink here's my email marcin@email.com "
113 (pref+"@first hi there @marcink here's my email marcin@email.com "
114 "@lukaszb check @one_more22 it pls @ ttwelve @D[] @one@two@three ", ['first', 'lukaszb', 'marcink', 'one', 'one_more22']),
114 "@lukaszb check @one_more22 it pls @ ttwelve @D[] @one@two@three ", ['first', 'lukaszb', 'marcink', 'one', 'one_more22']),
115 (pref+"@MARCIN @maRCiN @2one_more22 @john please see this http://org.pl", ['2one_more22', 'john', 'MARCIN', 'maRCiN']),
115 (pref+"@MARCIN @maRCiN @2one_more22 @john please see this http://org.pl", ['2one_more22', 'john', 'MARCIN', 'maRCiN']),
116 (pref+"@marian.user just do it @marco-polo and next extract @marco_polo", ['marco-polo', 'marco_polo', 'marian.user']),
116 (pref+"@marian.user just do it @marco-polo and next extract @marco_polo", ['marco-polo', 'marco_polo', 'marian.user']),
117 (pref+"user.dot hej ! not-needed maril@domain.org", []),
117 (pref+"user.dot hej ! not-needed maril@domain.org", []),
118 (pref+"\n@marcin", ['marcin']),
118 (pref+"\n@marcin", ['marcin']),
119 ]
119 ]
120 for pref in ['', '\n', 'hi !', '\t', '\n\n']]), ids=no_newline_id_generator)
120 for pref in ['', '\n', 'hi !', '\t', '\n\n']]), ids=no_newline_id_generator)
121 def test_mention_extractor(text, expected):
121 def test_mention_extractor(text, expected):
122 from rhodecode.lib.utils2 import extract_mentioned_users
122 from rhodecode.lib.utils2 import extract_mentioned_users
123 got = extract_mentioned_users(text)
123 got = extract_mentioned_users(text)
124 assert sorted(got, key=lambda x: x.lower()) == got
124 assert sorted(got, key=lambda x: x.lower()) == got
125 assert set(expected) == set(got)
125 assert set(expected) == set(got)
126
126
127 @pytest.mark.parametrize("age_args, expected, kw", [
127 @pytest.mark.parametrize("age_args, expected, kw", [
128 ({}, u'just now', {}),
128 ({}, u'just now', {}),
129 ({'seconds': -1}, u'1 second ago', {}),
129 ({'seconds': -1}, u'1 second ago', {}),
130 ({'seconds': -60 * 2}, u'2 minutes ago', {}),
130 ({'seconds': -60 * 2}, u'2 minutes ago', {}),
131 ({'hours': -1}, u'1 hour ago', {}),
131 ({'hours': -1}, u'1 hour ago', {}),
132 ({'hours': -24}, u'1 day ago', {}),
132 ({'hours': -24}, u'1 day ago', {}),
133 ({'hours': -24 * 5}, u'5 days ago', {}),
133 ({'hours': -24 * 5}, u'5 days ago', {}),
134 ({'months': -1}, u'1 month ago', {}),
134 ({'months': -1}, u'1 month ago', {}),
135 ({'months': -1, 'days': -2}, u'1 month and 2 days ago', {}),
135 ({'months': -1, 'days': -2}, u'1 month and 2 days ago', {}),
136 ({'years': -1, 'months': -1}, u'1 year and 1 month ago', {}),
136 ({'years': -1, 'months': -1}, u'1 year and 1 month ago', {}),
137 ({}, u'just now', {'short_format': True}),
137 ({}, u'just now', {'short_format': True}),
138 ({'seconds': -1}, u'1sec ago', {'short_format': True}),
138 ({'seconds': -1}, u'1sec ago', {'short_format': True}),
139 ({'seconds': -60 * 2}, u'2min ago', {'short_format': True}),
139 ({'seconds': -60 * 2}, u'2min ago', {'short_format': True}),
140 ({'hours': -1}, u'1h ago', {'short_format': True}),
140 ({'hours': -1}, u'1h ago', {'short_format': True}),
141 ({'hours': -24}, u'1d ago', {'short_format': True}),
141 ({'hours': -24}, u'1d ago', {'short_format': True}),
142 ({'hours': -24 * 5}, u'5d ago', {'short_format': True}),
142 ({'hours': -24 * 5}, u'5d ago', {'short_format': True}),
143 ({'months': -1}, u'1m ago', {'short_format': True}),
143 ({'months': -1}, u'1m ago', {'short_format': True}),
144 ({'months': -1, 'days': -2}, u'1m, 2d ago', {'short_format': True}),
144 ({'months': -1, 'days': -2}, u'1m, 2d ago', {'short_format': True}),
145 ({'years': -1, 'months': -1}, u'1y, 1m ago', {'short_format': True}),
145 ({'years': -1, 'months': -1}, u'1y, 1m ago', {'short_format': True}),
146 ])
146 ])
147 def test_age(age_args, expected, kw, baseapp):
147 def test_age(age_args, expected, kw, baseapp):
148 from rhodecode.lib.utils2 import age
148 from rhodecode.lib.utils2 import age
149 from dateutil import relativedelta
149 from dateutil import relativedelta
150 n = datetime.datetime(year=2012, month=5, day=17)
150 n = datetime.datetime(year=2012, month=5, day=17)
151 delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs)
151 delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs)
152
152
153 def translate(elem):
153 def translate(elem):
154 return elem.interpolate()
154 return elem.interpolate()
155
155
156 assert translate(age(n + delt(**age_args), now=n, **kw)) == expected
156 assert translate(age(n + delt(**age_args), now=n, **kw)) == expected
157
157
158
158
159 @pytest.mark.parametrize("age_args, expected, kw", [
159 @pytest.mark.parametrize("age_args, expected, kw", [
160 ({}, u'just now', {}),
160 ({}, u'just now', {}),
161 ({'seconds': 1}, u'in 1 second', {}),
161 ({'seconds': 1}, u'in 1 second', {}),
162 ({'seconds': 60 * 2}, u'in 2 minutes', {}),
162 ({'seconds': 60 * 2}, u'in 2 minutes', {}),
163 ({'hours': 1}, u'in 1 hour', {}),
163 ({'hours': 1}, u'in 1 hour', {}),
164 ({'hours': 24}, u'in 1 day', {}),
164 ({'hours': 24}, u'in 1 day', {}),
165 ({'hours': 24 * 5}, u'in 5 days', {}),
165 ({'hours': 24 * 5}, u'in 5 days', {}),
166 ({'months': 1}, u'in 1 month', {}),
166 ({'months': 1}, u'in 1 month', {}),
167 ({'months': 1, 'days': 1}, u'in 1 month and 1 day', {}),
167 ({'months': 1, 'days': 1}, u'in 1 month and 1 day', {}),
168 ({'years': 1, 'months': 1}, u'in 1 year and 1 month', {}),
168 ({'years': 1, 'months': 1}, u'in 1 year and 1 month', {}),
169 ({}, u'just now', {'short_format': True}),
169 ({}, u'just now', {'short_format': True}),
170 ({'seconds': 1}, u'in 1sec', {'short_format': True}),
170 ({'seconds': 1}, u'in 1sec', {'short_format': True}),
171 ({'seconds': 60 * 2}, u'in 2min', {'short_format': True}),
171 ({'seconds': 60 * 2}, u'in 2min', {'short_format': True}),
172 ({'hours': 1}, u'in 1h', {'short_format': True}),
172 ({'hours': 1}, u'in 1h', {'short_format': True}),
173 ({'hours': 24}, u'in 1d', {'short_format': True}),
173 ({'hours': 24}, u'in 1d', {'short_format': True}),
174 ({'hours': 24 * 5}, u'in 5d', {'short_format': True}),
174 ({'hours': 24 * 5}, u'in 5d', {'short_format': True}),
175 ({'months': 1}, u'in 1m', {'short_format': True}),
175 ({'months': 1}, u'in 1m', {'short_format': True}),
176 ({'months': 1, 'days': 1}, u'in 1m, 1d', {'short_format': True}),
176 ({'months': 1, 'days': 1}, u'in 1m, 1d', {'short_format': True}),
177 ({'years': 1, 'months': 1}, u'in 1y, 1m', {'short_format': True}),
177 ({'years': 1, 'months': 1}, u'in 1y, 1m', {'short_format': True}),
178 ])
178 ])
179 def test_age_in_future(age_args, expected, kw, baseapp):
179 def test_age_in_future(age_args, expected, kw, baseapp):
180 from rhodecode.lib.utils2 import age
180 from rhodecode.lib.utils2 import age
181 from dateutil import relativedelta
181 from dateutil import relativedelta
182 n = datetime.datetime(year=2012, month=5, day=17)
182 n = datetime.datetime(year=2012, month=5, day=17)
183 delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs)
183 delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs)
184
184
185 def translate(elem):
185 def translate(elem):
186 return elem.interpolate()
186 return elem.interpolate()
187
187
188 assert translate(age(n + delt(**age_args), now=n, **kw)) == expected
188 assert translate(age(n + delt(**age_args), now=n, **kw)) == expected
189
189
190
190
191 @pytest.mark.parametrize("sample, expected_tags", [
191 @pytest.mark.parametrize("sample, expected_tags", [
192 # entry
192 # entry
193 ((
193 ((
194 ""
194 ""
195 ),
195 ),
196 [
196 [
197
197
198 ]),
198 ]),
199 # entry
199 # entry
200 ((
200 ((
201 "hello world [stale]"
201 "hello world [stale]"
202 ),
202 ),
203 [
203 [
204 ('state', '[stale]'),
204 ('state', '[stale]'),
205 ]),
205 ]),
206 # entry
206 # entry
207 ((
207 ((
208 "hello world [v2.0.0] [v1.0.0]"
208 "hello world [v2.0.0] [v1.0.0]"
209 ),
209 ),
210 [
210 [
211 ('generic', '[v2.0.0]'),
211 ('generic', '[v2.0.0]'),
212 ('generic', '[v1.0.0]'),
212 ('generic', '[v1.0.0]'),
213 ]),
213 ]),
214 # entry
214 # entry
215 ((
215 ((
216 "he[ll]o wo[rl]d"
216 "he[ll]o wo[rl]d"
217 ),
217 ),
218 [
218 [
219 ('label', '[ll]'),
219 ('label', '[ll]'),
220 ('label', '[rl]'),
220 ('label', '[rl]'),
221 ]),
221 ]),
222 # entry
222 # entry
223 ((
223 ((
224 "hello world [stale]\n[featured]\n[stale] [dead] [dev]"
224 "hello world [stale]\n[featured]\n[stale] [dead] [dev]"
225 ),
225 ),
226 [
226 [
227 ('state', '[stale]'),
227 ('state', '[stale]'),
228 ('state', '[featured]'),
228 ('state', '[featured]'),
229 ('state', '[stale]'),
229 ('state', '[stale]'),
230 ('state', '[dead]'),
230 ('state', '[dead]'),
231 ('state', '[dev]'),
231 ('state', '[dev]'),
232 ]),
232 ]),
233 # entry
233 # entry
234 ((
234 ((
235 "hello world \n\n [stale] \n [url =&gt; [name](http://rc.com)]"
235 "hello world \n\n [stale] \n [url =&gt; [name](http://rc.com)]"
236 ),
236 ),
237 [
237 [
238 ('state', '[stale]'),
238 ('state', '[stale]'),
239 ('url', '[url =&gt; [name](http://rc.com)]'),
239 ('url', '[url =&gt; [name](http://rc.com)]'),
240 ]),
240 ]),
241 # entry
241 # entry
242 ((
242 ((
243 "[url =&gt; [linkNameJS](javascript:alert(document.domain))]\n"
243 "[url =&gt; [linkNameJS](javascript:alert(document.domain))]\n"
244 "[url =&gt; [linkNameHTTP](http://rhodecode.com)]\n"
244 "[url =&gt; [linkNameHTTP](http://rhodecode.com)]\n"
245 "[url =&gt; [linkNameHTTPS](https://rhodecode.com)]\n"
245 "[url =&gt; [linkNameHTTPS](https://rhodecode.com)]\n"
246 "[url =&gt; [linkNamePath](/repo_group)]\n"
246 "[url =&gt; [linkNamePath](/repo_group)]\n"
247 ),
247 ),
248 [
248 [
249 ('generic', '[linkNameJS]'),
249 ('generic', '[linkNameJS]'),
250 ('url', '[url =&gt; [linkNameHTTP](http://rhodecode.com)]'),
250 ('url', '[url =&gt; [linkNameHTTP](http://rhodecode.com)]'),
251 ('url', '[url =&gt; [linkNameHTTPS](https://rhodecode.com)]'),
251 ('url', '[url =&gt; [linkNameHTTPS](https://rhodecode.com)]'),
252 ('url', '[url =&gt; [linkNamePath](/repo_group)]'),
252 ('url', '[url =&gt; [linkNamePath](/repo_group)]'),
253 ]),
253 ]),
254 # entry
254 # entry
255 ((
255 ((
256 "hello pta[tag] gog [[]] [[] sda ero[or]d [me =&gt;>< sa]"
256 "hello pta[tag] gog [[]] [[] sda ero[or]d [me =&gt;>< sa]"
257 "[requires] [stale] [see<>=&gt;] [see =&gt; http://url.com]"
257 "[requires] [stale] [see<>=&gt;] [see =&gt; http://url.com]"
258 "[requires =&gt; url] [lang =&gt; python] [just a tag] "
258 "[requires =&gt; url] [lang =&gt; python] [just a tag] "
259 "<html_tag first='abc' attr=\"my.url?attr=&another=\"></html_tag>"
259 "<html_tag first='abc' attr=\"my.url?attr=&another=\"></html_tag>"
260 "[,d] [ =&gt; ULR ] [obsolete] [desc]]"
260 "[,d] [ =&gt; ULR ] [obsolete] [desc]]"
261 ),
261 ),
262 [
262 [
263 ('label', '[desc]'),
263 ('label', '[desc]'),
264 ('label', '[obsolete]'),
264 ('label', '[obsolete]'),
265 ('label', '[or]'),
265 ('label', '[or]'),
266 ('label', '[requires]'),
266 ('label', '[requires]'),
267 ('label', '[tag]'),
267 ('label', '[tag]'),
268 ('state', '[stale]'),
268 ('state', '[stale]'),
269 ('lang', '[lang =&gt; python]'),
269 ('lang', '[lang =&gt; python]'),
270 ('ref', '[requires =&gt; url]'),
270 ('ref', '[requires =&gt; url]'),
271 ('see', '[see =&gt; http://url.com]'),
271 ('see', '[see =&gt; http://url.com]'),
272
272
273 ]),
273 ]),
274
274
275 ], ids=no_newline_id_generator)
275 ], ids=no_newline_id_generator)
276 def test_metatag_extraction(sample, expected_tags):
276 def test_metatag_extraction(sample, expected_tags):
277 from rhodecode.lib.helpers import extract_metatags
277 from rhodecode.lib.helpers import extract_metatags
278 tags, value = extract_metatags(sample)
278 tags, value = extract_metatags(sample)
279 assert sorted(tags) == sorted(expected_tags)
279 assert sorted(tags) == sorted(expected_tags)
280
280
281
281
282 @pytest.mark.parametrize("tag_data, expected_html", [
282 @pytest.mark.parametrize("tag_data, expected_html", [
283
283
284 (('state', '[stable]'), '<div class="metatag" tag="state stable">stable</div>'),
284 (('state', '[stable]'), '<div class="metatag" tag="state stable">stable</div>'),
285 (('state', '[stale]'), '<div class="metatag" tag="state stale">stale</div>'),
285 (('state', '[stale]'), '<div class="metatag" tag="state stale">stale</div>'),
286 (('state', '[featured]'), '<div class="metatag" tag="state featured">featured</div>'),
286 (('state', '[featured]'), '<div class="metatag" tag="state featured">featured</div>'),
287 (('state', '[dev]'), '<div class="metatag" tag="state dev">dev</div>'),
287 (('state', '[dev]'), '<div class="metatag" tag="state dev">dev</div>'),
288 (('state', '[dead]'), '<div class="metatag" tag="state dead">dead</div>'),
288 (('state', '[dead]'), '<div class="metatag" tag="state dead">dead</div>'),
289
289
290 (('label', '[personal]'), '<div class="metatag" tag="label">personal</div>'),
290 (('label', '[personal]'), '<div class="metatag" tag="label">personal</div>'),
291 (('generic', '[v2.0.0]'), '<div class="metatag" tag="generic">v2.0.0</div>'),
291 (('generic', '[v2.0.0]'), '<div class="metatag" tag="generic">v2.0.0</div>'),
292
292
293 (('lang', '[lang =&gt; JavaScript]'), '<div class="metatag" tag="lang">JavaScript</div>'),
293 (('lang', '[lang =&gt; JavaScript]'), '<div class="metatag" tag="lang">JavaScript</div>'),
294 (('lang', '[lang =&gt; C++]'), '<div class="metatag" tag="lang">C++</div>'),
294 (('lang', '[lang =&gt; C++]'), '<div class="metatag" tag="lang">C++</div>'),
295 (('lang', '[lang =&gt; C#]'), '<div class="metatag" tag="lang">C#</div>'),
295 (('lang', '[lang =&gt; C#]'), '<div class="metatag" tag="lang">C#</div>'),
296 (('lang', '[lang =&gt; Delphi/Object]'), '<div class="metatag" tag="lang">Delphi/Object</div>'),
296 (('lang', '[lang =&gt; Delphi/Object]'), '<div class="metatag" tag="lang">Delphi/Object</div>'),
297 (('lang', '[lang =&gt; Objective-C]'), '<div class="metatag" tag="lang">Objective-C</div>'),
297 (('lang', '[lang =&gt; Objective-C]'), '<div class="metatag" tag="lang">Objective-C</div>'),
298 (('lang', '[lang =&gt; .NET]'), '<div class="metatag" tag="lang">.NET</div>'),
298 (('lang', '[lang =&gt; .NET]'), '<div class="metatag" tag="lang">.NET</div>'),
299
299
300 (('license', '[license =&gt; BSD 3-clause]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/BSD 3-clause">BSD 3-clause</a></div>'),
300 (('license', '[license =&gt; BSD 3-clause]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/BSD 3-clause">BSD 3-clause</a></div>'),
301 (('license', '[license =&gt; GPLv3]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/GPLv3">GPLv3</a></div>'),
301 (('license', '[license =&gt; GPLv3]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/GPLv3">GPLv3</a></div>'),
302 (('license', '[license =&gt; MIT]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/MIT">MIT</a></div>'),
302 (('license', '[license =&gt; MIT]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/MIT">MIT</a></div>'),
303 (('license', '[license =&gt; AGPLv3]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/AGPLv3">AGPLv3</a></div>'),
303 (('license', '[license =&gt; AGPLv3]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/AGPLv3">AGPLv3</a></div>'),
304
304
305 (('ref', '[requires =&gt; RepoName]'), '<div class="metatag" tag="ref requires">requires: <a href="/RepoName">RepoName</a></div>'),
305 (('ref', '[requires =&gt; RepoName]'), '<div class="metatag" tag="ref requires">requires: <a href="/RepoName">RepoName</a></div>'),
306 (('ref', '[recommends =&gt; GroupName]'), '<div class="metatag" tag="ref recommends">recommends: <a href="/GroupName">GroupName</a></div>'),
306 (('ref', '[recommends =&gt; GroupName]'), '<div class="metatag" tag="ref recommends">recommends: <a href="/GroupName">GroupName</a></div>'),
307 (('ref', '[conflicts =&gt; SomeName]'), '<div class="metatag" tag="ref conflicts">conflicts: <a href="/SomeName">SomeName</a></div>'),
307 (('ref', '[conflicts =&gt; SomeName]'), '<div class="metatag" tag="ref conflicts">conflicts: <a href="/SomeName">SomeName</a></div>'),
308 (('ref', '[base =&gt; SomeName]'), '<div class="metatag" tag="ref base">base: <a href="/SomeName">SomeName</a></div>'),
308 (('ref', '[base =&gt; SomeName]'), '<div class="metatag" tag="ref base">base: <a href="/SomeName">SomeName</a></div>'),
309
309
310 (('see', '[see =&gt; http://rhodecode.com]'), '<div class="metatag" tag="see">see: http://rhodecode.com </div>'),
310 (('see', '[see =&gt; http://rhodecode.com]'), '<div class="metatag" tag="see">see: http://rhodecode.com </div>'),
311
311
312 (('url', '[url =&gt; [linkName](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">linkName</a> </div>'),
312 (('url', '[url =&gt; [linkName](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">linkName</a> </div>'),
313 (('url', '[url =&gt; [example link](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">example link</a> </div>'),
313 (('url', '[url =&gt; [example link](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">example link</a> </div>'),
314 (('url', '[url =&gt; [v1.0.0](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">v1.0.0</a> </div>'),
314 (('url', '[url =&gt; [v1.0.0](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">v1.0.0</a> </div>'),
315
315
316 ])
316 ])
317 def test_metatags_stylize(tag_data, expected_html):
317 def test_metatags_stylize(tag_data, expected_html):
318 from rhodecode.lib.helpers import style_metatag
318 from rhodecode.lib.helpers import style_metatag
319 tag_type,value = tag_data
319 tag_type,value = tag_data
320 assert style_metatag(tag_type, value) == expected_html
320 assert style_metatag(tag_type, value) == expected_html
321
321
322
322
323 @pytest.mark.parametrize("tmpl_url, email, expected", [
323 @pytest.mark.parametrize("tmpl_url, email, expected", [
324 ('http://test.com/{email}', 'test@foo.com', 'http://test.com/test@foo.com'),
324 ('http://test.com/{email}', 'test@foo.com', 'http://test.com/test@foo.com'),
325
325
326 ('http://test.com/{md5email}', 'test@foo.com', 'http://test.com/3cb7232fcc48743000cb86d0d5022bd9'),
326 ('http://test.com/{md5email}', 'test@foo.com', 'http://test.com/3cb7232fcc48743000cb86d0d5022bd9'),
327 ('http://test.com/{md5email}', 'testąć@foo.com', 'http://test.com/978debb907a3c55cd741872ab293ef30'),
327 ('http://test.com/{md5email}', 'testąć@foo.com', 'http://test.com/978debb907a3c55cd741872ab293ef30'),
328
328
329 ('http://testX.com/{md5email}?s={size}', 'test@foo.com', 'http://testX.com/3cb7232fcc48743000cb86d0d5022bd9?s=24'),
329 ('http://testX.com/{md5email}?s={size}', 'test@foo.com', 'http://testX.com/3cb7232fcc48743000cb86d0d5022bd9?s=24'),
330 ('http://testX.com/{md5email}?s={size}', 'testąć@foo.com', 'http://testX.com/978debb907a3c55cd741872ab293ef30?s=24'),
330 ('http://testX.com/{md5email}?s={size}', 'testąć@foo.com', 'http://testX.com/978debb907a3c55cd741872ab293ef30?s=24'),
331
331
332 ('{scheme}://{netloc}/{md5email}/{size}', 'test@foo.com', 'https://server.com/3cb7232fcc48743000cb86d0d5022bd9/24'),
332 ('{scheme}://{netloc}/{md5email}/{size}', 'test@foo.com', 'https://server.com/3cb7232fcc48743000cb86d0d5022bd9/24'),
333 ('{scheme}://{netloc}/{md5email}/{size}', 'testąć@foo.com', 'https://server.com/978debb907a3c55cd741872ab293ef30/24'),
333 ('{scheme}://{netloc}/{md5email}/{size}', 'testąć@foo.com', 'https://server.com/978debb907a3c55cd741872ab293ef30/24'),
334
334
335 ('http://test.com/{email}', 'testąć@foo.com', 'http://test.com/testąć@foo.com'),
335 ('http://test.com/{email}', 'testąć@foo.com', 'http://test.com/testąć@foo.com'),
336 ('http://test.com/{email}?size={size}', 'test@foo.com', 'http://test.com/test@foo.com?size=24'),
336 ('http://test.com/{email}?size={size}', 'test@foo.com', 'http://test.com/test@foo.com?size=24'),
337 ('http://test.com/{email}?size={size}', 'testąć@foo.com', 'http://test.com/testąć@foo.com?size=24'),
337 ('http://test.com/{email}?size={size}', 'testąć@foo.com', 'http://test.com/testąć@foo.com?size=24'),
338 ])
338 ])
339 def test_gravatar_url_builder(tmpl_url, email, expected, request_stub):
339 def test_gravatar_url_builder(tmpl_url, email, expected, request_stub):
340 from rhodecode.lib.helpers import gravatar_url
340 from rhodecode.lib.helpers import gravatar_url
341
341
342 def fake_tmpl_context(_url):
342 def fake_tmpl_context(_url):
343 _c = AttributeDict()
343 _c = AttributeDict()
344 _c.visual = AttributeDict()
344 _c.visual = AttributeDict()
345 _c.visual.use_gravatar = True
345 _c.visual.use_gravatar = True
346 _c.visual.gravatar_url = _url
346 _c.visual.gravatar_url = _url
347 return _c
347 return _c
348
348
349 # mock pyramid.threadlocals
349 # mock pyramid.threadlocals
350 def fake_get_current_request():
350 def fake_get_current_request():
351 request_stub.scheme = 'https'
351 request_stub.scheme = 'https'
352 request_stub.host = 'server.com'
352 request_stub.host = 'server.com'
353
353
354 request_stub._call_context = fake_tmpl_context(tmpl_url)
354 request_stub._call_context = fake_tmpl_context(tmpl_url)
355 return request_stub
355 return request_stub
356
356
357 with mock.patch('rhodecode.lib.helpers.get_current_request',
357 with mock.patch('rhodecode.lib.helpers.get_current_request',
358 fake_get_current_request):
358 fake_get_current_request):
359
359
360 grav = gravatar_url(email_address=email, size=24)
360 grav = gravatar_url(email_address=email, size=24)
361 assert grav == expected
361 assert grav == expected
362
362
363
363
364 @pytest.mark.parametrize(
364 @pytest.mark.parametrize(
365 "email, first_name, last_name, expected_initials, expected_color", [
365 "email, first_name, last_name, expected_initials, expected_color", [
366
366
367 ('test@rhodecode.com', '', '', 'TR', '#8a994d'),
367 ('test@rhodecode.com', '', '', 'TR', '#8a994d'),
368 ('marcin.kuzminski@rhodecode.com', '', '', 'MK', '#6559b3'),
368 ('marcin.kuzminski@rhodecode.com', '', '', 'MK', '#6559b3'),
369 # special cases of email
369 # special cases of email
370 ('john.van.dam@rhodecode.com', '', '', 'JD', '#526600'),
370 ('john.van.dam@rhodecode.com', '', '', 'JD', '#526600'),
371 ('Guido.van.Rossum@rhodecode.com', '', '', 'GR', '#990052'),
371 ('Guido.van.Rossum@rhodecode.com', '', '', 'GR', '#990052'),
372 ('Guido.van.Rossum@rhodecode.com', 'Guido', 'Van Rossum', 'GR', '#990052'),
372 ('Guido.van.Rossum@rhodecode.com', 'Guido', 'Van Rossum', 'GR', '#990052'),
373
373
374 ('rhodecode+Guido.van.Rossum@rhodecode.com', '', '', 'RR', '#46598c'),
374 ('rhodecode+Guido.van.Rossum@rhodecode.com', '', '', 'RR', '#46598c'),
375 ('pclouds@rhodecode.com', 'Nguyễn Thái', 'Tgọc Duy', 'ND', '#665200'),
375 ('pclouds@rhodecode.com', 'Nguyễn Thái', 'Tgọc Duy', 'ND', '#665200'),
376
376
377 ('john-brown@foo.com', '', '', 'JF', '#73006b'),
377 ('john-brown@foo.com', '', '', 'JF', '#73006b'),
378 ('admin@rhodecode.com', 'Marcin', 'Kuzminski', 'MK', '#104036'),
378 ('admin@rhodecode.com', 'Marcin', 'Kuzminski', 'MK', '#104036'),
379 # partials
379 # partials
380 ('admin@rhodecode.com', 'Marcin', '', 'MR', '#104036'), # fn+email
380 ('admin@rhodecode.com', 'Marcin', '', 'MR', '#104036'), # fn+email
381 ('admin@rhodecode.com', '', 'Kuzminski', 'AK', '#104036'), # em+ln
381 ('admin@rhodecode.com', '', 'Kuzminski', 'AK', '#104036'), # em+ln
382 # non-ascii
382 # non-ascii
383 ('admin@rhodecode.com', 'Marcin', 'Śuzminski', 'MS', '#104036'),
383 ('admin@rhodecode.com', 'Marcin', 'Śuzminski', 'MS', '#104036'),
384 ('marcin.śuzminski@rhodecode.com', '', '', 'MS', '#73000f'),
384 ('marcin.śuzminski@rhodecode.com', '', '', 'MS', '#73000f'),
385
385
386 # special cases, LDAP can provide those...
386 # special cases, LDAP can provide those...
387 ('admin@', 'Marcin', 'Śuzminski', 'MS', '#aa00ff'),
387 ('admin@', 'Marcin', 'Śuzminski', 'MS', '#aa00ff'),
388 ('marcin.śuzminski', '', '', 'MS', '#402020'),
388 ('marcin.śuzminski', '', '', 'MS', '#402020'),
389 ('null', '', '', 'NL', '#8c4646'),
389 ('null', '', '', 'NL', '#8c4646'),
390 ('some.@abc.com', 'some', '', 'SA', '#664e33')
390 ('some.@abc.com', 'some', '', 'SA', '#664e33')
391 ])
391 ])
392 def test_initials_gravatar_pick_of_initials_and_color_algo(
392 def test_initials_gravatar_pick_of_initials_and_color_algo(
393 email, first_name, last_name, expected_initials, expected_color):
393 email, first_name, last_name, expected_initials, expected_color):
394 instance = InitialsGravatar(email, first_name, last_name)
394 instance = InitialsGravatar(email, first_name, last_name)
395 assert instance.get_initials() == expected_initials
395 assert instance.get_initials() == expected_initials
396 assert instance.str2color(email) == expected_color
396 assert instance.str2color(email) == expected_color
397
397
398
398
399 def test_initials_gravatar_mapping_algo():
399 def test_initials_gravatar_mapping_algo():
400 pos = set()
400 pos = set()
401 instance = InitialsGravatar('', '', '')
401 instance = InitialsGravatar('', '', '')
402 iterations = 0
402 iterations = 0
403
403
404 variations = []
404 variations = []
405 for letter1 in string.ascii_letters:
405 for letter1 in string.ascii_letters:
406 for letter2 in string.ascii_letters[::-1][:10]:
406 for letter2 in string.ascii_letters[::-1][:10]:
407 for letter3 in string.ascii_letters[:10]:
407 for letter3 in string.ascii_letters[:10]:
408 variations.append(
408 variations.append(
409 '%s@rhodecode.com' % (letter1+letter2+letter3))
409 '%s@rhodecode.com' % (letter1+letter2+letter3))
410
410
411 max_variations = 4096
411 max_variations = 4096
412 for email in variations[:max_variations]:
412 for email in variations[:max_variations]:
413 iterations += 1
413 iterations += 1
414 pos.add(
414 pos.add(
415 instance.pick_color_bank_index(email,
415 instance.pick_color_bank_index(email,
416 instance.get_color_bank()))
416 instance.get_color_bank()))
417
417
418 # we assume that we have matched all 256 possible positions,
418 # we assume that we have matched all 256 possible positions,
419 # in a reasonable amount of different email addresses
419 # in a reasonable amount of different email addresses
420 assert len(pos) == 256
420 assert len(pos) == 256
421 assert iterations == max_variations
421 assert iterations == max_variations
422
422
423
423
424 @pytest.mark.parametrize("tmpl, repo_name, overrides, prefix, expected", [
424 @pytest.mark.parametrize("tmpl, repo_name, overrides, prefix, expected", [
425 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '', 'http://vps1:8000/group/repo1'),
425 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '', 'http://vps1:8000/group/repo1'),
426 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/group/repo1'),
426 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/group/repo1'),
427 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '/rc', 'http://vps1:8000/rc/group/repo1'),
427 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '/rc', 'http://vps1:8000/rc/group/repo1'),
428 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc', 'http://user@vps1:8000/rc/group/repo1'),
428 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc', 'http://user@vps1:8000/rc/group/repo1'),
429 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc', 'http://marcink@vps1:8000/rc/group/repo1'),
429 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc', 'http://marcink@vps1:8000/rc/group/repo1'),
430 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc/', 'http://user@vps1:8000/rc/group/repo1'),
430 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc/', 'http://user@vps1:8000/rc/group/repo1'),
431 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc/', 'http://marcink@vps1:8000/rc/group/repo1'),
431 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc/', 'http://marcink@vps1:8000/rc/group/repo1'),
432 ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {}, '', 'http://vps1:8000/_23'),
432 ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {}, '', 'http://vps1:8000/_23'),
433 ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'),
433 ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'),
434 ('http://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'),
434 ('http://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'),
435 ('http://{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://vps1:8000/_23'),
435 ('http://{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://vps1:8000/_23'),
436 ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://marcink@proxy1.server.com/group/repo1'),
436 ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://marcink@proxy1.server.com/group/repo1'),
437 ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {}, '', 'https://proxy1.server.com/group/repo1'),
437 ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {}, '', 'https://proxy1.server.com/group/repo1'),
438 ('https://proxy1.server.com/{user}/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://proxy1.server.com/marcink/group/repo1'),
438 ('https://proxy1.server.com/{user}/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://proxy1.server.com/marcink/group/repo1'),
439 ])
439 ])
440 def test_clone_url_generator(tmpl, repo_name, overrides, prefix, expected):
440 def test_clone_url_generator(tmpl, repo_name, overrides, prefix, expected):
441 from rhodecode.lib.utils2 import get_clone_url
441 from rhodecode.lib.utils2 import get_clone_url
442
442
443 class RequestStub(object):
443 class RequestStub(object):
444 def request_url(self, name):
444 def request_url(self, name):
445 return 'http://vps1:8000' + prefix
445 return 'http://vps1:8000' + prefix
446
446
447 def route_url(self, name):
447 def route_url(self, name):
448 return self.request_url(name)
448 return self.request_url(name)
449
449
450 clone_url = get_clone_url(
450 clone_url = get_clone_url(
451 request=RequestStub(),
451 request=RequestStub(),
452 uri_tmpl=tmpl,
452 uri_tmpl=tmpl,
453 repo_name=repo_name, repo_id=23, **overrides)
453 repo_name=repo_name, repo_id=23, **overrides)
454 assert clone_url == expected
454 assert clone_url == expected
455
455
456
456
457 def _quick_url(text, tmpl="""<a class="revision-link" href="%s">%s</a>""", url_=None):
457 def _quick_url(text, tmpl="""<a class="revision-link" href="%s">%s</a>""", url_=None):
458 """
458 """
459 Changes `some text url[foo]` => `some text <a href="/">foo</a>`
459 Changes `some text url[foo]` => `some text <a href="/">foo</a>`
460
460
461 :param text:
461 :param text:
462 """
462 """
463 import re
463 import re
464 # quickly change expected url[] into a link
464 # quickly change expected url[] into a link
465 URL_PAT = re.compile(r'(?:url\[)(.+?)(?:\])')
465 URL_PAT = re.compile(r'(?:url\[)(.+?)(?:\])')
466
466
467 def url_func(match_obj):
467 def url_func(match_obj):
468 _url = match_obj.groups()[0]
468 _url = match_obj.groups()[0]
469 return tmpl % (url_ or '/some-url', _url)
469 return tmpl % (url_ or '/some-url', _url)
470 return URL_PAT.sub(url_func, text)
470 return URL_PAT.sub(url_func, text)
471
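# Editorial note, worked example of the helper above (default template, url_=None):
#     _quick_url("from rev url[deadbeef]")
#     == 'from rev <a class="revision-link" href="/some-url">deadbeef</a>'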
471
472
472
473 @pytest.mark.parametrize("sample, expected", [
473 @pytest.mark.parametrize("sample, expected", [
474 ("",
474 ("",
475 ""),
475 ""),
476 ("git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68",
476 ("git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68",
477 "git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68"),
477 "git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68"),
478 ("from rev 000000000000",
478 ("from rev 000000000000",
479 "from rev url[000000000000]"),
479 "from rev url[000000000000]"),
480 ("from rev 000000000000123123 also rev 000000000000",
480 ("from rev 000000000000123123 also rev 000000000000",
481 "from rev url[000000000000123123] also rev url[000000000000]"),
481 "from rev url[000000000000123123] also rev url[000000000000]"),
482 ("this should-000 00",
482 ("this should-000 00",
483 "this should-000 00"),
483 "this should-000 00"),
484 ("longtextffffffffff rev 123123123123",
484 ("longtextffffffffff rev 123123123123",
485 "longtextffffffffff rev url[123123123123]"),
485 "longtextffffffffff rev url[123123123123]"),
486 ("rev ffffffffffffffffffffffffffffffffffffffffffffffffff",
486 ("rev ffffffffffffffffffffffffffffffffffffffffffffffffff",
487 "rev ffffffffffffffffffffffffffffffffffffffffffffffffff"),
487 "rev ffffffffffffffffffffffffffffffffffffffffffffffffff"),
488 ("ffffffffffff some text traalaa",
488 ("ffffffffffff some text traalaa",
489 "url[ffffffffffff] some text traalaa"),
489 "url[ffffffffffff] some text traalaa"),
490 ("""Multi line
490 ("""Multi line
491 123123123123
491 123123123123
492 some text 123123123123
492 some text 123123123123
493 sometimes !
493 sometimes !
494 """,
494 """,
495 """Multi line
495 """Multi line
496 url[123123123123]
496 url[123123123123]
497 some text url[123123123123]
497 some text url[123123123123]
498 sometimes !
498 sometimes !
499 """)
499 """)
500 ], ids=no_newline_id_generator)
500 ], ids=no_newline_id_generator)
501 def test_urlify_commits(sample, expected):
501 def test_urlify_commits(sample, expected):
502 def fake_url(self, *args, **kwargs):
502 def fake_url(self, *args, **kwargs):
503 return '/some-url'
503 return '/some-url'
504
504
505 expected = _quick_url(expected)
505 expected = _quick_url(expected)
506
506
507 with mock.patch('rhodecode.lib.helpers.route_url', fake_url):
507 with mock.patch('rhodecode.lib.helpers.route_url', fake_url):
508 from rhodecode.lib.helpers import urlify_commits
508 from rhodecode.lib.helpers import urlify_commits
509 assert urlify_commits(sample, 'repo_name') == expected
509 assert urlify_commits(sample, 'repo_name') == expected
510
510
511
511
512 @pytest.mark.parametrize("sample, expected, url_", [
512 @pytest.mark.parametrize("sample, expected, url_", [
513 ("",
513 ("",
514 "",
514 "",
515 ""),
515 ""),
516 ("https://svn.apache.org/repos",
516 ("https://svn.apache.org/repos",
517 "url[https://svn.apache.org/repos]",
517 "url[https://svn.apache.org/repos]",
518 "https://svn.apache.org/repos"),
518 "https://svn.apache.org/repos"),
519 ("http://svn.apache.org/repos",
519 ("http://svn.apache.org/repos",
520 "url[http://svn.apache.org/repos]",
520 "url[http://svn.apache.org/repos]",
521 "http://svn.apache.org/repos"),
521 "http://svn.apache.org/repos"),
522 ("from rev a also rev http://google.com",
522 ("from rev a also rev http://google.com",
523 "from rev a also rev url[http://google.com]",
523 "from rev a also rev url[http://google.com]",
524 "http://google.com"),
524 "http://google.com"),
525 ("""Multi line
525 ("""Multi line
526 https://foo.bar.com
526 https://foo.bar.com
527 some text lalala""",
527 some text lalala""",
528 """Multi line
528 """Multi line
529 url[https://foo.bar.com]
529 url[https://foo.bar.com]
530 some text lalala""",
530 some text lalala""",
531 "https://foo.bar.com")
531 "https://foo.bar.com")
532 ], ids=no_newline_id_generator)
532 ], ids=no_newline_id_generator)
533 def test_urlify_test(sample, expected, url_):
533 def test_urlify_test(sample, expected, url_):
534 from rhodecode.lib.helpers import urlify_text
534 from rhodecode.lib.helpers import urlify_text
535 expected = _quick_url(expected, tmpl="""<a href="%s">%s</a>""", url_=url_)
535 expected = _quick_url(expected, tmpl="""<a href="%s">%s</a>""", url_=url_)
536 assert urlify_text(sample) == expected
536 assert urlify_text(sample) == expected
537
537
538
538
539 @pytest.mark.parametrize("test, expected", [
539 @pytest.mark.parametrize("test, expected", [
540 ("", None),
540 ("", None),
541 ("/_2", '2'),
541 ("/_2", '2'),
542 ("_2", '2'),
542 ("_2", '2'),
543 ("/_2/", '2'),
543 ("/_2/", '2'),
544 ("_2/", '2'),
544 ("_2/", '2'),
545
545
546 ("/_21", '21'),
546 ("/_21", '21'),
547 ("_21", '21'),
547 ("_21", '21'),
548 ("/_21/", '21'),
548 ("/_21/", '21'),
549 ("_21/", '21'),
549 ("_21/", '21'),
550
550
551 ("/_21/foobar", '21'),
551 ("/_21/foobar", '21'),
552 ("_21/121", '21'),
552 ("_21/121", '21'),
553 ("/_21/_12", '21'),
553 ("/_21/_12", '21'),
554 ("_21/rc/foo", '21'),
554 ("_21/rc/foo", '21'),
555
555
556 ])
556 ])
557 def test_get_repo_by_id(test, expected):
557 def test_get_repo_by_id(test, expected):
558 from rhodecode.model.repo import RepoModel
558 from rhodecode.model.repo import RepoModel
559 _test = RepoModel()._extract_id_from_repo_name(test)
559 _test = RepoModel()._extract_id_from_repo_name(test)
560 assert _test == expected
560 assert _test == expected
561
561
562
562
563 def test_invalidation_context(baseapp):
563 def test_invalidation_context(baseapp):
564 repo_id = 999
564 repo_id = 999
565
565
566 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
566 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
567 repo_id, CacheKey.CACHE_TYPE_README)
567 repo_id, CacheKey.CACHE_TYPE_README)
568 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
568 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
569 repo_id=repo_id)
569 repo_id=repo_id)
570 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
570 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
571
571
572 calls = [1, 2]
572 calls = [1, 2]
573
573
574 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
574 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
575 def _dummy_func(cache_key):
575 def _dummy_func(cache_key):
576 val = calls.pop(0)
576 val = calls.pop(0)
577 return 'result:{}'.format(val)
577 return 'result:{}'.format(val)
578
578
579 inv_context_manager = rc_cache.InvalidationContext(
579 inv_context_manager = rc_cache.InvalidationContext(
580 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
580 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
581
581
582 # 1st call, fresh caches
582 # 1st call, fresh caches
583 with inv_context_manager as invalidation_context:
583 with inv_context_manager as invalidation_context:
584 should_invalidate = invalidation_context.should_invalidate()
584 should_invalidate = invalidation_context.should_invalidate()
585 if should_invalidate:
585 if should_invalidate:
586 _dummy_func.invalidate('some-key')
586 result = _dummy_func.refresh('some-key')
587 result = _dummy_func('some-key')
587 else:
588 result = _dummy_func('some-key')
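# editorial note: like dogpile.cache's cache_on_arguments, the decorator used here
# exposes .refresh(), which re-runs the decorated function and stores the fresh value
# in one step; this is the pattern the changeset adopts in place of invalidate()-then-call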
588
589
589 assert isinstance(invalidation_context, rc_cache.FreshRegionCache)
590 assert isinstance(invalidation_context, rc_cache.FreshRegionCache)
590 assert should_invalidate is True
591 assert should_invalidate is True
591
592
592 assert 'result:1' == result
593 assert 'result:1' == result
593 # should be cached, so calling it twice will give the same result!
594 # should be cached, so calling it twice will give the same result!
594 result = _dummy_func('some-key')
595 result = _dummy_func('some-key')
595 assert 'result:1' == result
596 assert 'result:1' == result
596
597
597 # 2nd call, we create a new context manager, this should be now key aware, and
598 # 2nd call, we create a new context manager, this should be now key aware, and
598 # return an active cache region
599 # return an active cache region
599 with inv_context_manager as invalidation_context:
600 with inv_context_manager as invalidation_context:
600 should_invalidate = invalidation_context.should_invalidate()
601 should_invalidate = invalidation_context.should_invalidate()
601 assert isinstance(invalidation_context, rc_cache.ActiveRegionCache)
602 assert isinstance(invalidation_context, rc_cache.ActiveRegionCache)
602 assert should_invalidate is False
603 assert should_invalidate is False
603
604
604 # Mark invalidation
605 # Mark invalidation
605 CacheKey.set_invalidate(invalidation_namespace)
606 CacheKey.set_invalidate(invalidation_namespace)
606
607
607 # 3nd call, fresh caches
608 # 3nd call, fresh caches
608 with inv_context_manager as invalidation_context:
609 with inv_context_manager as invalidation_context:
609 should_invalidate = invalidation_context.should_invalidate()
610 should_invalidate = invalidation_context.should_invalidate()
610 if should_invalidate:
611 if should_invalidate:
611 _dummy_func.invalidate('some-key')
612 result = _dummy_func.refresh('some-key')
612 result = _dummy_func('some-key')
613 else:
614 result = _dummy_func('some-key')
613
615
614 assert isinstance(invalidation_context, rc_cache.FreshRegionCache)
616 assert isinstance(invalidation_context, rc_cache.FreshRegionCache)
615 assert should_invalidate is True
617 assert should_invalidate is True
616
618
617 assert 'result:2' == result
619 assert 'result:2' == result
618
620
619 # cached again, same result
621 # cached again, same result
620 result = _dummy_func('some-key')
622 result = _dummy_func('some-key')
621 assert 'result:2' == result
623 assert 'result:2' == result
622
624
623
625
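
The test above walks the full lifecycle: a fresh namespace forces a recompute via refresh(), later calls are served from the cache, and CacheKey.set_invalidate() marks the namespace stale so the next context triggers another recompute. Stripped of assertions, the caller-side pattern looks roughly like the sketch below; generate_readme, 'readme-key' and the placeholder return value are invented for illustration, while the rc_cache and CacheKey calls mirror the test.

from rhodecode.lib import rc_cache
from rhodecode.model.db import CacheKey

repo_id = 999  # any repository id
namespace_uid = 'cache_repo_instance.{}_{}'.format(
    repo_id, CacheKey.CACHE_TYPE_README)
inv_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
region = rc_cache.get_or_create_region('cache_repo_longterm', namespace_uid)

@region.conditional_cache_on_arguments(namespace=namespace_uid)
def generate_readme(cache_key):
    # placeholder for the expensive computation whose result should be cached
    return 'rendered readme'

with rc_cache.InvalidationContext(
        uid=namespace_uid, invalidation_namespace=inv_namespace) as ctx:
    if ctx.should_invalidate():
        # namespace was marked stale: recompute and overwrite the cached value
        readme = generate_readme.refresh('readme-key')
    else:
        # cache is still valid: serve the stored value without recomputing
        readme = generate_readme('readme-key')
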


def test_invalidation_context_exception_in_compute(baseapp):
    repo_id = 888

    cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
        repo_id, CacheKey.CACHE_TYPE_README)
    invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
        repo_id=repo_id)
    region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

    @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
    def _dummy_func(cache_key):
        raise Exception('Error in cache func')

    with pytest.raises(Exception):
        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)

        # 1st call, fresh caches
        with inv_context_manager as invalidation_context:
            should_invalidate = invalidation_context.should_invalidate()
            if should_invalidate:
-                _dummy_func.invalidate('some-key-2')
-                _dummy_func('some-key-2')
+                _dummy_func.refresh('some-key-2')
+            else:
+                _dummy_func('some-key-2')
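
The behaviour pinned down here is that a failure inside the cached function is not swallowed: refresh() re-runs the function, so the exception reaches the caller and nothing is stored. A caller that wants a fallback has to handle that itself; continuing the hypothetical generate_readme sketch above:

import logging
log = logging.getLogger(__name__)

try:
    # refresh() re-runs generate_readme, so any error raised inside it
    # surfaces here rather than a broken value ending up in the cache
    readme = generate_readme.refresh('readme-key')
except Exception:
    log.exception('README rendering failed, serving an empty readme instead')
    readme = ''
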


@pytest.mark.parametrize('execution_number', range(5))
def test_cache_invalidation_race_condition(execution_number, baseapp):
    import time

    repo_id = 777

    cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
        repo_id, CacheKey.CACHE_TYPE_README)
    invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
        repo_id=repo_id)
    region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

    @run_test_concurrently(25)
    def test_create_and_delete_cache_keys():
        time.sleep(0.2)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def _dummy_func(cache_key):
            val = 'async'
            return 'result:{}'.format(val)

        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)

        # 1st call, fresh caches
        with inv_context_manager as invalidation_context:
            should_invalidate = invalidation_context.should_invalidate()
            if should_invalidate:
-                _dummy_func.invalidate('some-key-3')
-                _dummy_func('some-key-3')
+                _dummy_func.refresh('some-key-3')
+            else:
+                _dummy_func('some-key-3')

        # Mark invalidation
        CacheKey.set_invalidate(invalidation_namespace)

    test_create_and_delete_cache_keys()
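
run_test_concurrently comes from the test utilities and is not shown in this hunk. Purely as an assumption about its behaviour (not the actual helper), a decorator of that shape could look like the sketch below: run the wrapped callable in several threads at once and re-raise the first failure, which is what gives the cache invalidation code a chance to race.

import threading
from functools import wraps

def run_test_concurrently(times):
    # Assumed shape of the helper used above: execute the decorated callable
    # in `times` threads simultaneously and re-raise the first error seen.
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            errors = []

            def worker():
                try:
                    func(*args, **kwargs)
                except Exception as exc:
                    errors.append(exc)

            threads = [threading.Thread(target=worker) for _ in range(times)]
            for thread in threads:
                thread.start()
            for thread in threads:
                thread.join()
            if errors:
                raise errors[0]
        return wrapper
    return decorator
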