caches: use .refresh() instead of .invalidate()...
marcink -
r2939:913f92bf default

The requested changes are too big and content was truncated.
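To illustrate the pattern this commit switches to, here is a minimal sketch of the old and new code inside the `with inv_context_manager as invalidation_context:` block, reusing the `heavy_compute` example from the rc_cache usage docstring further down (the function body and its arguments are illustrative only):

    # old pattern: delete the cached key, then call the decorated function
    # again so dogpile recomputes and stores the value
    if invalidation_context.should_invalidate():
        heavy_compute.invalidate('some_name', 'param1', 'param2')
    result = heavy_compute('some_name', 'param1', 'param2')

    # new pattern: recompute and store the fresh value in a single step
    args = ('some_name', 'param1', 'param2')
    if invalidation_context.should_invalidate():
        result = heavy_compute.refresh(*args)
    else:
        result = heavy_compute(*args)

With `.refresh()` the freshly computed value is written straight into the cache region (see the `refresh` helper added in rc_cache below), so the extra call after `.invalidate()` is no longer needed.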

@@ -1,242 +1,238 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2017-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import pytz
21 21 import logging
22 22
23 23 from pyramid.view import view_config
24 24 from pyramid.response import Response
25 25 from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed
26 26
27 27 from rhodecode.apps._base import RepoAppView
28 28 from rhodecode.lib import audit_logger
29 29 from rhodecode.lib import rc_cache
30 30 from rhodecode.lib import helpers as h
31 31 from rhodecode.lib.auth import (
32 32 LoginRequired, HasRepoPermissionAnyDecorator)
33 33 from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer
34 34 from rhodecode.lib.utils2 import str2bool, safe_int, md5_safe
35 35 from rhodecode.model.db import UserApiKeys, CacheKey
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 class RepoFeedView(RepoAppView):
41 41 def load_default_context(self):
42 42 c = self._get_local_tmpl_context()
43 43
44 44
45 45 self._load_defaults()
46 46 return c
47 47
48 48 def _get_config(self):
49 49 import rhodecode
50 50 config = rhodecode.CONFIG
51 51
52 52 return {
53 53 'language': 'en-us',
54 54 'feed_ttl': '5', # TTL of feed,
55 55 'feed_include_diff':
56 56 str2bool(config.get('rss_include_diff', False)),
57 57 'feed_items_per_page':
58 58 safe_int(config.get('rss_items_per_page', 20)),
59 59 'feed_diff_limit':
60 60 # we need to protect from parsing huge diffs here, otherwise
61 61 # we can kill the server
62 62 safe_int(config.get('rss_cut_off_limit', 32 * 1024)),
63 63 }
64 64
65 65 def _load_defaults(self):
66 66 _ = self.request.translate
67 67 config = self._get_config()
68 68 # common values for feeds
69 69 self.description = _('Changes on %s repository')
70 70 self.title = _('%s %s feed') % (self.db_repo_name, '%s')
71 71 self.language = config["language"]
72 72 self.ttl = config["feed_ttl"]
73 73 self.feed_include_diff = config['feed_include_diff']
74 74 self.feed_diff_limit = config['feed_diff_limit']
75 75 self.feed_items_per_page = config['feed_items_per_page']
76 76
77 77 def _changes(self, commit):
78 78 diff_processor = DiffProcessor(
79 79 commit.diff(), diff_limit=self.feed_diff_limit)
80 80 _parsed = diff_processor.prepare(inline_diff=False)
81 81 limited_diff = isinstance(_parsed, LimitedDiffContainer)
82 82
83 83 return diff_processor, _parsed, limited_diff
84 84
85 85 def _get_title(self, commit):
86 86 return h.shorter(commit.message, 160)
87 87
88 88 def _get_description(self, commit):
89 89 _renderer = self.request.get_partial_renderer(
90 90 'rhodecode:templates/feed/atom_feed_entry.mako')
91 91 diff_processor, parsed_diff, limited_diff = self._changes(commit)
92 92 filtered_parsed_diff, has_hidden_changes = self.path_filter.filter_patchset(parsed_diff)
93 93 return _renderer(
94 94 'body',
95 95 commit=commit,
96 96 parsed_diff=filtered_parsed_diff,
97 97 limited_diff=limited_diff,
98 98 feed_include_diff=self.feed_include_diff,
99 99 diff_processor=diff_processor,
100 100 has_hidden_changes=has_hidden_changes
101 101 )
102 102
103 103 def _set_timezone(self, date, tzinfo=pytz.utc):
104 104 if not getattr(date, "tzinfo", None):
105 105 date.replace(tzinfo=tzinfo)
106 106 return date
107 107
108 108 def _get_commits(self):
109 109 return list(self.rhodecode_vcs_repo[-self.feed_items_per_page:])
110 110
111 111 def uid(self, repo_id, commit_id):
112 112 return '{}:{}'.format(md5_safe(repo_id), md5_safe(commit_id))
113 113
114 114 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
115 115 @HasRepoPermissionAnyDecorator(
116 116 'repository.read', 'repository.write', 'repository.admin')
117 117 @view_config(
118 118 route_name='atom_feed_home', request_method='GET',
119 119 renderer=None)
120 120 def atom(self):
121 121 """
122 122 Produce an atom-1.0 feed via feedgenerator module
123 123 """
124 124 self.load_default_context()
125 125
126 126 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
127 127 self.db_repo.repo_id, CacheKey.CACHE_TYPE_FEED)
128 128 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
129 129 repo_id=self.db_repo.repo_id)
130 130
131 131 region = rc_cache.get_or_create_region('cache_repo_longterm',
132 132 cache_namespace_uid)
133 133
134 134 condition = not self.path_filter.is_enabled
135 135
136 136 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
137 137 condition=condition)
138 138 def generate_atom_feed(repo_id, _repo_name, _feed_type):
139 139 feed = Atom1Feed(
140 140 title=self.title % _repo_name,
141 141 link=h.route_url('repo_summary', repo_name=_repo_name),
142 142 description=self.description % _repo_name,
143 143 language=self.language,
144 144 ttl=self.ttl
145 145 )
146 146
147 147 for commit in reversed(self._get_commits()):
148 148 date = self._set_timezone(commit.date)
149 149 feed.add_item(
150 150 unique_id=self.uid(repo_id, commit.raw_id),
151 151 title=self._get_title(commit),
152 152 author_name=commit.author,
153 153 description=self._get_description(commit),
154 154 link=h.route_url(
155 155 'repo_commit', repo_name=_repo_name,
156 156 commit_id=commit.raw_id),
157 157 pubdate=date,)
158 158
159 159 return feed.mime_type, feed.writeString('utf-8')
160 160
161 161 inv_context_manager = rc_cache.InvalidationContext(
162 162 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
163 163 with inv_context_manager as invalidation_context:
164 # check for stored invalidation signal, and maybe purge the cache
165 # before computing it again
164 args = (self.db_repo.repo_id, self.db_repo.repo_name, 'atom',)
165 # re-compute and store cache if we get invalidate signal
166 166 if invalidation_context.should_invalidate():
167 generate_atom_feed.invalidate(
168 self.db_repo.repo_id, self.db_repo.repo_name, 'atom')
169
170 mime_type, feed = generate_atom_feed(
171 self.db_repo.repo_id, self.db_repo.repo_name, 'atom')
167 mime_type, feed = generate_atom_feed.refresh(*args)
168 else:
169 mime_type, feed = generate_atom_feed(*args)
172 170
173 171 log.debug('Repo ATOM feed computed in %.3fs',
174 172 inv_context_manager.compute_time)
175 173
176 174 response = Response(feed)
177 175 response.content_type = mime_type
178 176 return response
179 177
180 178 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
181 179 @HasRepoPermissionAnyDecorator(
182 180 'repository.read', 'repository.write', 'repository.admin')
183 181 @view_config(
184 182 route_name='rss_feed_home', request_method='GET',
185 183 renderer=None)
186 184 def rss(self):
187 185 """
188 186 Produce an rss2 feed via feedgenerator module
189 187 """
190 188 self.load_default_context()
191 189
192 190 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
193 191 self.db_repo.repo_id, CacheKey.CACHE_TYPE_FEED)
194 192 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
195 193 repo_id=self.db_repo.repo_id)
196 194 region = rc_cache.get_or_create_region('cache_repo_longterm',
197 195 cache_namespace_uid)
198 196
199 197 condition = not self.path_filter.is_enabled
200 198
201 199 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
202 200 condition=condition)
203 201 def generate_rss_feed(repo_id, _repo_name, _feed_type):
204 202 feed = Rss201rev2Feed(
205 203 title=self.title % _repo_name,
206 204 link=h.route_url('repo_summary', repo_name=_repo_name),
207 205 description=self.description % _repo_name,
208 206 language=self.language,
209 207 ttl=self.ttl
210 208 )
211 209
212 210 for commit in reversed(self._get_commits()):
213 211 date = self._set_timezone(commit.date)
214 212 feed.add_item(
215 213 unique_id=self.uid(repo_id, commit.raw_id),
216 214 title=self._get_title(commit),
217 215 author_name=commit.author,
218 216 description=self._get_description(commit),
219 217 link=h.route_url(
220 218 'repo_commit', repo_name=_repo_name,
221 219 commit_id=commit.raw_id),
222 220 pubdate=date,)
223 221
224 222 return feed.mime_type, feed.writeString('utf-8')
225 223
226 224 inv_context_manager = rc_cache.InvalidationContext(
227 225 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
228 226 with inv_context_manager as invalidation_context:
229 # check for stored invalidation signal, and maybe purge the cache
230 # before computing it again
227 args = (self.db_repo.repo_id, self.db_repo.repo_name, 'rss',)
228 # re-compute and store cache if we get invalidate signal
231 229 if invalidation_context.should_invalidate():
232 generate_rss_feed.invalidate(
233 self.db_repo.repo_id, self.db_repo.repo_name, 'rss')
234
235 mime_type, feed = generate_rss_feed(
236 self.db_repo.repo_id, self.db_repo.repo_name, 'rss')
230 mime_type, feed = generate_rss_feed.refresh(*args)
231 else:
232 mime_type, feed = generate_rss_feed(*args)
237 233 log.debug(
238 234 'Repo RSS feed computed in %.3fs', inv_context_manager.compute_time)
239 235
240 236 response = Response(feed)
241 237 response.content_type = mime_type
242 238 return response
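Both feed views key their caches on a per-repository invalidation namespace (`CacheKey.REPO_INVALIDATION_NAMESPACE`). A hedged sketch of how a global invalidation signal would be sent so that the next request takes the `should_invalidate()` branch and recomputes via `.refresh()`; this follows the `CacheKey.set_invalidate` call shown in the rc_cache docstring below, and the exact call site is an assumption:

    from rhodecode.model.db import CacheKey

    # `repo` stands for a Repository instance here (assumption);
    # the namespace is shared by all caches of one repository (feed, readme, ...)
    invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
        repo_id=repo.repo_id)

    # mark cache keys registered under this namespace as invalid; the next
    # InvalidationContext.__enter__ for them should return FreshRegionCache,
    # and the view then calls generate_atom_feed.refresh(*args)
    CacheKey.set_invalidate(invalidation_namespace)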
@@ -1,392 +1,390 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import string
23 23 import rhodecode
24 24
25 25 from pyramid.view import view_config
26 26
27 27 from rhodecode.controllers import utils
28 28 from rhodecode.apps._base import RepoAppView
29 29 from rhodecode.config.conf import (LANGUAGES_EXTENSIONS_MAP)
30 30 from rhodecode.lib import helpers as h, rc_cache
31 31 from rhodecode.lib.utils2 import safe_str, safe_int
32 32 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
33 33 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
34 34 from rhodecode.lib.ext_json import json
35 35 from rhodecode.lib.vcs.backends.base import EmptyCommit
36 36 from rhodecode.lib.vcs.exceptions import (
37 37 CommitError, EmptyRepositoryError, CommitDoesNotExistError)
38 38 from rhodecode.model.db import Statistics, CacheKey, User
39 39 from rhodecode.model.meta import Session
40 40 from rhodecode.model.repo import ReadmeFinder
41 41 from rhodecode.model.scm import ScmModel
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 class RepoSummaryView(RepoAppView):
47 47
48 48 def load_default_context(self):
49 49 c = self._get_local_tmpl_context(include_app_defaults=True)
50 50 c.rhodecode_repo = None
51 51 if not c.repository_requirements_missing:
52 52 c.rhodecode_repo = self.rhodecode_vcs_repo
53 53 return c
54 54
55 55 def _get_readme_data(self, db_repo, renderer_type):
56 56
57 57 log.debug('Looking for README file')
58 58
59 59 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
60 60 db_repo.repo_id, CacheKey.CACHE_TYPE_README)
61 61 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
62 62 repo_id=self.db_repo.repo_id)
63 63 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
64 64
65 65 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
66 66 def generate_repo_readme(repo_id, _repo_name, _renderer_type):
67 67 readme_data = None
68 68 readme_node = None
69 69 readme_filename = None
70 70 commit = self._get_landing_commit_or_none(db_repo)
71 71 if commit:
72 72 log.debug("Searching for a README file.")
73 73 readme_node = ReadmeFinder(_renderer_type).search(commit)
74 74 if readme_node:
75 75 relative_urls = {
76 76 'raw': h.route_path(
77 77 'repo_file_raw', repo_name=_repo_name,
78 78 commit_id=commit.raw_id, f_path=readme_node.path),
79 79 'standard': h.route_path(
80 80 'repo_files', repo_name=_repo_name,
81 81 commit_id=commit.raw_id, f_path=readme_node.path),
82 82 }
83 83 readme_data = self._render_readme_or_none(
84 84 commit, readme_node, relative_urls)
85 85 readme_filename = readme_node.path
86 86 return readme_data, readme_filename
87 87
88 88 inv_context_manager = rc_cache.InvalidationContext(
89 89 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
90 90 with inv_context_manager as invalidation_context:
91 # check for stored invalidation signal, and maybe purge the cache
92 # before computing it again
91 args = (db_repo.repo_id, db_repo.repo_name, renderer_type,)
92 # re-compute and store cache if we get invalidate signal
93 93 if invalidation_context.should_invalidate():
94 generate_repo_readme.invalidate(
95 db_repo.repo_id, db_repo.repo_name, renderer_type)
96
97 instance = generate_repo_readme(
98 db_repo.repo_id, db_repo.repo_name, renderer_type)
94 instance = generate_repo_readme.refresh(*args)
95 else:
96 instance = generate_repo_readme(*args)
99 97
100 98 log.debug(
101 99 'Repo readme generated and computed in %.3fs',
102 100 inv_context_manager.compute_time)
103 101 return instance
104 102
105 103 def _get_landing_commit_or_none(self, db_repo):
106 104 log.debug("Getting the landing commit.")
107 105 try:
108 106 commit = db_repo.get_landing_commit()
109 107 if not isinstance(commit, EmptyCommit):
110 108 return commit
111 109 else:
112 110 log.debug("Repository is empty, no README to render.")
113 111 except CommitError:
114 112 log.exception(
115 113 "Problem getting commit when trying to render the README.")
116 114
117 115 def _render_readme_or_none(self, commit, readme_node, relative_urls):
118 116 log.debug(
119 117 'Found README file `%s` rendering...', readme_node.path)
120 118 renderer = MarkupRenderer()
121 119 try:
122 120 html_source = renderer.render(
123 121 readme_node.content, filename=readme_node.path)
124 122 if relative_urls:
125 123 return relative_links(html_source, relative_urls)
126 124 return html_source
127 125 except Exception:
128 126 log.exception(
129 127 "Exception while trying to render the README")
130 128
131 129 def _load_commits_context(self, c):
132 130 p = safe_int(self.request.GET.get('page'), 1)
133 131 size = safe_int(self.request.GET.get('size'), 10)
134 132
135 133 def url_generator(**kw):
136 134 query_params = {
137 135 'size': size
138 136 }
139 137 query_params.update(kw)
140 138 return h.route_path(
141 139 'repo_summary_commits',
142 140 repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)
143 141
144 142 pre_load = ['author', 'branch', 'date', 'message']
145 143 try:
146 144 collection = self.rhodecode_vcs_repo.get_commits(pre_load=pre_load)
147 145 except EmptyRepositoryError:
148 146 collection = self.rhodecode_vcs_repo
149 147
150 148 c.repo_commits = h.RepoPage(
151 149 collection, page=p, items_per_page=size, url=url_generator)
152 150 page_ids = [x.raw_id for x in c.repo_commits]
153 151 c.comments = self.db_repo.get_comments(page_ids)
154 152 c.statuses = self.db_repo.statuses(page_ids)
155 153
156 154 @LoginRequired()
157 155 @HasRepoPermissionAnyDecorator(
158 156 'repository.read', 'repository.write', 'repository.admin')
159 157 @view_config(
160 158 route_name='repo_summary_commits', request_method='GET',
161 159 renderer='rhodecode:templates/summary/summary_commits.mako')
162 160 def summary_commits(self):
163 161 c = self.load_default_context()
164 162 self._load_commits_context(c)
165 163 return self._get_template_context(c)
166 164
167 165 @LoginRequired()
168 166 @HasRepoPermissionAnyDecorator(
169 167 'repository.read', 'repository.write', 'repository.admin')
170 168 @view_config(
171 169 route_name='repo_summary', request_method='GET',
172 170 renderer='rhodecode:templates/summary/summary.mako')
173 171 @view_config(
174 172 route_name='repo_summary_slash', request_method='GET',
175 173 renderer='rhodecode:templates/summary/summary.mako')
176 174 @view_config(
177 175 route_name='repo_summary_explicit', request_method='GET',
178 176 renderer='rhodecode:templates/summary/summary.mako')
179 177 def summary(self):
180 178 c = self.load_default_context()
181 179
182 180 # Prepare the clone URL
183 181 username = ''
184 182 if self._rhodecode_user.username != User.DEFAULT_USER:
185 183 username = safe_str(self._rhodecode_user.username)
186 184
187 185 _def_clone_uri = _def_clone_uri_id = c.clone_uri_tmpl
188 186 _def_clone_uri_ssh = c.clone_uri_ssh_tmpl
189 187
190 188 if '{repo}' in _def_clone_uri:
191 189 _def_clone_uri_id = _def_clone_uri.replace(
192 190 '{repo}', '_{repoid}')
193 191 elif '{repoid}' in _def_clone_uri:
194 192 _def_clone_uri_id = _def_clone_uri.replace(
195 193 '_{repoid}', '{repo}')
196 194
197 195 c.clone_repo_url = self.db_repo.clone_url(
198 196 user=username, uri_tmpl=_def_clone_uri)
199 197 c.clone_repo_url_id = self.db_repo.clone_url(
200 198 user=username, uri_tmpl=_def_clone_uri_id)
201 199 c.clone_repo_url_ssh = self.db_repo.clone_url(
202 200 uri_tmpl=_def_clone_uri_ssh, ssh=True)
203 201
204 202 # If enabled, get statistics data
205 203
206 204 c.show_stats = bool(self.db_repo.enable_statistics)
207 205
208 206 stats = Session().query(Statistics) \
209 207 .filter(Statistics.repository == self.db_repo) \
210 208 .scalar()
211 209
212 210 c.stats_percentage = 0
213 211
214 212 if stats and stats.languages:
215 213 c.no_data = False is self.db_repo.enable_statistics
216 214 lang_stats_d = json.loads(stats.languages)
217 215
218 216 # Sort first by decreasing count and second by the file extension,
219 217 # so we have a consistent output.
220 218 lang_stats_items = sorted(lang_stats_d.iteritems(),
221 219 key=lambda k: (-k[1], k[0]))[:10]
222 220 lang_stats = [(x, {"count": y,
223 221 "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
224 222 for x, y in lang_stats_items]
225 223
226 224 c.trending_languages = json.dumps(lang_stats)
227 225 else:
228 226 c.no_data = True
229 227 c.trending_languages = json.dumps({})
230 228
231 229 scm_model = ScmModel()
232 230 c.enable_downloads = self.db_repo.enable_downloads
233 231 c.repository_followers = scm_model.get_followers(self.db_repo)
234 232 c.repository_forks = scm_model.get_forks(self.db_repo)
235 233 c.repository_is_user_following = scm_model.is_following_repo(
236 234 self.db_repo_name, self._rhodecode_user.user_id)
237 235
238 236 # first interaction with the VCS instance after here...
239 237 if c.repository_requirements_missing:
240 238 self.request.override_renderer = \
241 239 'rhodecode:templates/summary/missing_requirements.mako'
242 240 return self._get_template_context(c)
243 241
244 242 c.readme_data, c.readme_file = \
245 243 self._get_readme_data(self.db_repo, c.visual.default_renderer)
246 244
247 245 # loads the summary commits template context
248 246 self._load_commits_context(c)
249 247
250 248 return self._get_template_context(c)
251 249
252 250 def get_request_commit_id(self):
253 251 return self.request.matchdict['commit_id']
254 252
255 253 @LoginRequired()
256 254 @HasRepoPermissionAnyDecorator(
257 255 'repository.read', 'repository.write', 'repository.admin')
258 256 @view_config(
259 257 route_name='repo_stats', request_method='GET',
260 258 renderer='json_ext')
261 259 def repo_stats(self):
262 260 commit_id = self.get_request_commit_id()
263 261 show_stats = bool(self.db_repo.enable_statistics)
264 262 repo_id = self.db_repo.repo_id
265 263
266 264 cache_seconds = safe_int(
267 265 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
268 266 cache_on = cache_seconds > 0
269 267 log.debug(
270 268 'Computing REPO TREE for repo_id %s commit_id `%s` '
271 269 'with caching: %s[TTL: %ss]' % (
272 270 repo_id, commit_id, cache_on, cache_seconds or 0))
273 271
274 272 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
275 273 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
276 274
277 275 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
278 276 condition=cache_on)
279 277 def compute_stats(repo_id, commit_id, show_stats):
280 278 code_stats = {}
281 279 size = 0
282 280 try:
283 281 scm_instance = self.db_repo.scm_instance()
284 282 commit = scm_instance.get_commit(commit_id)
285 283
286 284 for node in commit.get_filenodes_generator():
287 285 size += node.size
288 286 if not show_stats:
289 287 continue
290 288 ext = string.lower(node.extension)
291 289 ext_info = LANGUAGES_EXTENSIONS_MAP.get(ext)
292 290 if ext_info:
293 291 if ext in code_stats:
294 292 code_stats[ext]['count'] += 1
295 293 else:
296 294 code_stats[ext] = {"count": 1, "desc": ext_info}
297 295 except (EmptyRepositoryError, CommitDoesNotExistError):
298 296 pass
299 297 return {'size': h.format_byte_size_binary(size),
300 298 'code_stats': code_stats}
301 299
302 300 stats = compute_stats(self.db_repo.repo_id, commit_id, show_stats)
303 301 return stats
304 302
305 303 @LoginRequired()
306 304 @HasRepoPermissionAnyDecorator(
307 305 'repository.read', 'repository.write', 'repository.admin')
308 306 @view_config(
309 307 route_name='repo_refs_data', request_method='GET',
310 308 renderer='json_ext')
311 309 def repo_refs_data(self):
312 310 _ = self.request.translate
313 311 self.load_default_context()
314 312
315 313 repo = self.rhodecode_vcs_repo
316 314 refs_to_create = [
317 315 (_("Branch"), repo.branches, 'branch'),
318 316 (_("Tag"), repo.tags, 'tag'),
319 317 (_("Bookmark"), repo.bookmarks, 'book'),
320 318 ]
321 319 res = self._create_reference_data(
322 320 repo, self.db_repo_name, refs_to_create)
323 321 data = {
324 322 'more': False,
325 323 'results': res
326 324 }
327 325 return data
328 326
329 327 @LoginRequired()
330 328 @HasRepoPermissionAnyDecorator(
331 329 'repository.read', 'repository.write', 'repository.admin')
332 330 @view_config(
333 331 route_name='repo_refs_changelog_data', request_method='GET',
334 332 renderer='json_ext')
335 333 def repo_refs_changelog_data(self):
336 334 _ = self.request.translate
337 335 self.load_default_context()
338 336
339 337 repo = self.rhodecode_vcs_repo
340 338
341 339 refs_to_create = [
342 340 (_("Branches"), repo.branches, 'branch'),
343 341 (_("Closed branches"), repo.branches_closed, 'branch_closed'),
344 342 # TODO: enable when vcs can handle bookmarks filters
345 343 # (_("Bookmarks"), repo.bookmarks, "book"),
346 344 ]
347 345 res = self._create_reference_data(
348 346 repo, self.db_repo_name, refs_to_create)
349 347 data = {
350 348 'more': False,
351 349 'results': res
352 350 }
353 351 return data
354 352
355 353 def _create_reference_data(self, repo, full_repo_name, refs_to_create):
356 354 format_ref_id = utils.get_format_ref_id(repo)
357 355
358 356 result = []
359 357 for title, refs, ref_type in refs_to_create:
360 358 if refs:
361 359 result.append({
362 360 'text': title,
363 361 'children': self._create_reference_items(
364 362 repo, full_repo_name, refs, ref_type,
365 363 format_ref_id),
366 364 })
367 365 return result
368 366
369 367 def _create_reference_items(self, repo, full_repo_name, refs, ref_type,
370 368 format_ref_id):
371 369 result = []
372 370 is_svn = h.is_svn(repo)
373 371 for ref_name, raw_id in refs.iteritems():
374 372 files_url = self._create_files_url(
375 373 repo, full_repo_name, ref_name, raw_id, is_svn)
376 374 result.append({
377 375 'text': ref_name,
378 376 'id': format_ref_id(ref_name, raw_id),
379 377 'raw_id': raw_id,
380 378 'type': ref_type,
381 379 'files_url': files_url,
382 380 })
383 381 return result
384 382
385 383 def _create_files_url(self, repo, full_repo_name, ref_name, raw_id, is_svn):
386 384 use_commit_id = '/' in ref_name or is_svn
387 385 return h.route_path(
388 386 'repo_files',
389 387 repo_name=full_repo_name,
390 388 f_path=ref_name if is_svn else '',
391 389 commit_id=raw_id if use_commit_id else ref_name,
392 390 _query=dict(at=ref_name))
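Both the feed and summary views pass a `condition` flag to `conditional_cache_on_arguments`; when the flag is falsy the decorator calls the wrapped function directly and never touches dogpile (see the `if not condition: return creator()` branch in rc_cache below). A small sketch mirroring the `repo_stats` view above; `region` and `cache_namespace_uid` are assumed to be set up as in that view:

    cache_seconds = safe_int(
        rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
    cache_on = cache_seconds > 0  # a zero or missing TTL disables caching

    @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
                                           condition=cache_on)
    def compute_stats(repo_id, commit_id, show_stats):
        ...  # expensive tree walk, as in the view above

    # with cache_on == False the decorator simply executes the body on every
    # call; no dogpile key is generated or stored
    stats = compute_stats(repo_id, commit_id, show_stats)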
@@ -1,319 +1,320 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import os
21 21 import time
22 22 import logging
23 23 import functools
24 24 import threading
25 25
26 26 from dogpile.cache import CacheRegion
27 27 from dogpile.cache.util import compat
28 28
29 29 import rhodecode
30 30 from rhodecode.lib.utils import safe_str, sha1
31 31 from rhodecode.lib.utils2 import safe_unicode, str2bool
32 32 from rhodecode.model.db import Session, CacheKey, IntegrityError
33 33
34 34 from . import region_meta
35 35
36 36 log = logging.getLogger(__name__)
37 37
38 38
39 39 class RhodeCodeCacheRegion(CacheRegion):
40 40
41 41 def conditional_cache_on_arguments(
42 42 self, namespace=None,
43 43 expiration_time=None,
44 44 should_cache_fn=None,
45 45 to_str=compat.string_type,
46 46 function_key_generator=None,
47 47 condition=True):
48 48 """
49 49 Custom conditional decorator, that will not touch any dogpile internals if
50 50 condition isn't meet. This works a bit different than should_cache_fn
51 51 And it's faster in cases we don't ever want to compute cached values
52 52 """
53 53 expiration_time_is_callable = compat.callable(expiration_time)
54 54
55 55 if function_key_generator is None:
56 56 function_key_generator = self.function_key_generator
57 57
58 58 def decorator(fn):
59 59 if to_str is compat.string_type:
60 60 # backwards compatible
61 61 key_generator = function_key_generator(namespace, fn)
62 62 else:
63 63 key_generator = function_key_generator(namespace, fn, to_str=to_str)
64 64
65 65 @functools.wraps(fn)
66 66 def decorate(*arg, **kw):
67 67 key = key_generator(*arg, **kw)
68 68
69 69 @functools.wraps(fn)
70 70 def creator():
71 71 return fn(*arg, **kw)
72 72
73 73 if not condition:
74 74 return creator()
75 75
76 76 timeout = expiration_time() if expiration_time_is_callable \
77 77 else expiration_time
78 78
79 79 return self.get_or_create(key, creator, timeout, should_cache_fn)
80 80
81 81 def invalidate(*arg, **kw):
82 82 key = key_generator(*arg, **kw)
83 83 self.delete(key)
84 84
85 85 def set_(value, *arg, **kw):
86 86 key = key_generator(*arg, **kw)
87 87 self.set(key, value)
88 88
89 89 def get(*arg, **kw):
90 90 key = key_generator(*arg, **kw)
91 91 return self.get(key)
92 92
93 93 def refresh(*arg, **kw):
94 94 key = key_generator(*arg, **kw)
95 95 value = fn(*arg, **kw)
96 96 self.set(key, value)
97 97 return value
98 98
99 99 decorate.set = set_
100 100 decorate.invalidate = invalidate
101 101 decorate.refresh = refresh
102 102 decorate.get = get
103 103 decorate.original = fn
104 104 decorate.key_generator = key_generator
105 105
106 106 return decorate
107 107
108 108 return decorator
109 109
110 110
111 111 def make_region(*arg, **kw):
112 112 return RhodeCodeCacheRegion(*arg, **kw)
113 113
114 114
115 115 def get_default_cache_settings(settings, prefixes=None):
116 116 prefixes = prefixes or []
117 117 cache_settings = {}
118 118 for key in settings.keys():
119 119 for prefix in prefixes:
120 120 if key.startswith(prefix):
121 121 name = key.split(prefix)[1].strip()
122 122 val = settings[key]
123 123 if isinstance(val, basestring):
124 124 val = val.strip()
125 125 cache_settings[name] = val
126 126 return cache_settings
127 127
128 128
129 129 def compute_key_from_params(*args):
130 130 """
131 131 Helper to compute key from given params to be used in cache manager
132 132 """
133 133 return sha1("_".join(map(safe_str, args)))
134 134
135 135
136 136 def key_generator(namespace, fn):
137 137 fname = fn.__name__
138 138
139 139 def generate_key(*args):
140 140 namespace_pref = namespace or 'default'
141 141 arg_key = compute_key_from_params(*args)
142 142 final_key = "{}:{}_{}".format(namespace_pref, fname, arg_key)
143 143
144 144 return final_key
145 145
146 146 return generate_key
147 147
148 148
149 149 def get_or_create_region(region_name, region_namespace=None):
150 150 from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
151 151 region_obj = region_meta.dogpile_cache_regions.get(region_name)
152 152 if not region_obj:
153 153 raise EnvironmentError(
154 154 'Region `{}` not in configured: {}.'.format(
155 155 region_name, region_meta.dogpile_cache_regions.keys()))
156 156
157 157 region_uid_name = '{}:{}'.format(region_name, region_namespace)
158 158 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
159 159 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
160 160 if region_exist:
161 161 log.debug('Using already configured region: %s', region_namespace)
162 162 return region_exist
163 163 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
164 164 expiration_time = region_obj.expiration_time
165 165
166 166 if not os.path.isdir(cache_dir):
167 167 os.makedirs(cache_dir)
168 168 new_region = make_region(
169 169 name=region_uid_name, function_key_generator=key_generator
170 170 )
171 171 namespace_filename = os.path.join(
172 172 cache_dir, "{}.cache.dbm".format(region_namespace))
173 173 # special type that allows 1db per namespace
174 174 new_region.configure(
175 175 backend='dogpile.cache.rc.file_namespace',
176 176 expiration_time=expiration_time,
177 177 arguments={"filename": namespace_filename}
178 178 )
179 179
180 180 # create and save in region caches
181 181 log.debug('configuring new region: %s', region_uid_name)
182 182 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
183 183
184 184 return region_obj
185 185
186 186
187 187 def clear_cache_namespace(cache_region, cache_namespace_uid):
188 188 region = get_or_create_region(cache_region, cache_namespace_uid)
189 189 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
190 190 region.delete_multi(cache_keys)
191 191 return len(cache_keys)
192 192
193 193
194 194 class ActiveRegionCache(object):
195 195 def __init__(self, context):
196 196 self.context = context
197 197
198 198 def should_invalidate(self):
199 199 return False
200 200
201 201
202 202 class FreshRegionCache(object):
203 203 def __init__(self, context):
204 204 self.context = context
205 205
206 206 def should_invalidate(self):
207 207 return True
208 208
209 209
210 210 class InvalidationContext(object):
211 211 """
212 212 usage::
213 213
214 214 from rhodecode.lib import rc_cache
215 215
216 216 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
217 217 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
218 218
219 219 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
220 220 def heavy_compute(cache_name, param1, param2):
221 221 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
222 222
223 223 # invalidation namespace is shared namespace key for all process caches
224 224 # we use it to send a global signal
225 225 invalidation_namespace = 'repo_cache:1'
226 226
227 227 inv_context_manager = rc_cache.InvalidationContext(
228 228 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
229 229 with inv_context_manager as invalidation_context:
230 # check for stored invalidation signal, and maybe purge the cache
231 # before computing it again
230 args = ('one', 'two')
231 # re-compute and store cache if we get invalidate signal
232 232 if invalidation_context.should_invalidate():
233 heavy_compute.invalidate('some_name', 'param1', 'param2')
233 result = heavy_compute.refresh(*args)
234 else:
235 result = heavy_compute(*args)
234 236
235 result = heavy_compute('some_name', 'param1', 'param2')
236 237 compute_time = inv_context_manager.compute_time
237 print(compute_time)
238 log.debug('result computed in %.3fs', compute_time)
238 239
239 240 # To send global invalidation signal, simply run
240 241 CacheKey.set_invalidate(invalidation_namespace)
241 242
242 243 """
243 244
244 245 def __repr__(self):
245 246 return '<InvalidationContext:{}[{}]>'.format(
246 247 safe_str(self.cache_key), safe_str(self.uid))
247 248
248 249 def __init__(self, uid, invalidation_namespace='',
249 250 raise_exception=False, thread_scoped=None):
250 251 self.uid = uid
251 252 self.invalidation_namespace = invalidation_namespace
252 253 self.raise_exception = raise_exception
253 254 self.proc_id = safe_unicode(rhodecode.CONFIG.get('instance_id') or 'DEFAULT')
254 255 self.thread_id = 'global'
255 256
256 257 if thread_scoped is None:
257 258 # if we set "default" we can override this via .ini settings
258 259 thread_scoped = str2bool(rhodecode.CONFIG.get('cache_thread_scoped'))
259 260
260 261 # Append the thread id to the cache key if this invalidation context
261 262 # should be scoped to the current thread.
262 263 if thread_scoped is True:
263 264 self.thread_id = threading.current_thread().ident
264 265
265 266 self.cache_key = compute_key_from_params(uid)
266 267 self.cache_key = 'proc:{}_thread:{}_{}'.format(
267 268 self.proc_id, self.thread_id, self.cache_key)
268 269 self.compute_time = 0
269 270
270 271 def get_or_create_cache_obj(self, uid, invalidation_namespace=''):
271 272 cache_obj = CacheKey.get_active_cache(self.cache_key)
272 273 log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
273 274 invalidation_namespace = invalidation_namespace or self.invalidation_namespace
274 275 if not cache_obj:
275 276 cache_obj = CacheKey(self.cache_key, cache_args=invalidation_namespace)
276 277 return cache_obj
277 278
278 279 def __enter__(self):
279 280 """
280 281 Test if current object is valid, and return CacheRegion function
281 282 that does invalidation and calculation
282 283 """
283 284 # register or get a new key based on uid
284 285 self.cache_obj = self.get_or_create_cache_obj(uid=self.uid)
285 286 self._start_time = time.time()
286 287 if self.cache_obj.cache_active:
287 288 # means our cache obj exists and is marked as active, i.e. its
288 289 # cache is not outdated; we return ActiveRegionCache
289 290 self.skip_cache_active_change = True
290 291
291 292 return ActiveRegionCache(context=self)
292 293
293 294 # the key is either not existing or set to False, we return
294 295 # the real invalidator which re-computes value. We additionally set
295 296 # the flag to actually update the Database objects
296 297 self.skip_cache_active_change = False
297 298 return FreshRegionCache(context=self)
298 299
299 300 def __exit__(self, exc_type, exc_val, exc_tb):
300 301 # save compute time
301 302 self.compute_time = time.time() - self._start_time
302 303
303 304 if self.skip_cache_active_change:
304 305 return
305 306
306 307 try:
307 308 self.cache_obj.cache_active = True
308 309 Session().add(self.cache_obj)
309 310 Session().commit()
310 311 except IntegrityError:
311 312 # if we catch an integrity error, it means this object got inserted already;
312 313 # the assumption is that it's really an edge race-condition case and
313 314 # it's safe to skip it
314 315 Session().rollback()
315 316 except Exception:
316 317 log.exception('Failed to commit on cache key update')
317 318 Session().rollback()
318 319 if self.raise_exception:
319 320 raise
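For reference, `conditional_cache_on_arguments` attaches several helpers to the decorated function (`get`, `set`, `invalidate`, and the `refresh` this commit switches to). A usage sketch, assuming the `cache_repo_longterm` region is configured as in the views above; `expensive_call` and the arguments are illustrative placeholders:

    region = rc_cache.get_or_create_region('cache_repo_longterm', 'demo_namespace')

    @region.conditional_cache_on_arguments(namespace='demo_namespace')
    def compute(repo_id, name):
        return expensive_call(repo_id, name)  # placeholder for real work

    value = compute(1, 'docs')               # cached via dogpile get_or_create
    cached = compute.get(1, 'docs')          # read the stored value, no compute
    compute.set('forced-value', 1, 'docs')   # overwrite the stored value
    compute.invalidate(1, 'docs')            # delete the key only
    fresh = compute.refresh(1, 'docs')       # recompute, store and return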
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated.
@@ -1,683 +1,687 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Package for testing various lib/helper functions in rhodecode
24 24 """
25 25
26 26 import datetime
27 27 import string
28 28 import mock
29 29 import pytest
30 30
31 31 from rhodecode.tests import no_newline_id_generator
32 32 from rhodecode.tests.utils import run_test_concurrently
33 33
34 34 from rhodecode.lib import rc_cache
35 35 from rhodecode.lib.helpers import InitialsGravatar
36 36 from rhodecode.lib.utils2 import AttributeDict
37 37
38 38 from rhodecode.model.db import Repository, CacheKey
39 39
40 40
41 41 def _urls_for_proto(proto):
42 42 return [
43 43 ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
44 44 '%s://127.0.0.1' % proto),
45 45 ('%s://marcink@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
46 46 '%s://127.0.0.1' % proto),
47 47 ('%s://marcink:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
48 48 '%s://127.0.0.1' % proto),
49 49 ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'],
50 50 '%s://127.0.0.1:8080' % proto),
51 51 ('%s://domain.org' % proto, ['%s://' % proto, 'domain.org'],
52 52 '%s://domain.org' % proto),
53 53 ('%s://user:pass@domain.org:8080' % proto,
54 54 ['%s://' % proto, 'domain.org', '8080'],
55 55 '%s://domain.org:8080' % proto),
56 56 ]
57 57
58 58 TEST_URLS = _urls_for_proto('http') + _urls_for_proto('https')
59 59
60 60
61 61 @pytest.mark.parametrize("test_url, expected, expected_creds", TEST_URLS)
62 62 def test_uri_filter(test_url, expected, expected_creds):
63 63 from rhodecode.lib.utils2 import uri_filter
64 64 assert uri_filter(test_url) == expected
65 65
66 66
67 67 @pytest.mark.parametrize("test_url, expected, expected_creds", TEST_URLS)
68 68 def test_credentials_filter(test_url, expected, expected_creds):
69 69 from rhodecode.lib.utils2 import credentials_filter
70 70 assert credentials_filter(test_url) == expected_creds
71 71
72 72
73 73 @pytest.mark.parametrize("str_bool, expected", [
74 74 ('t', True),
75 75 ('true', True),
76 76 ('y', True),
77 77 ('yes', True),
78 78 ('on', True),
79 79 ('1', True),
80 80 ('Y', True),
81 81 ('yeS', True),
82 82 ('Y', True),
83 83 ('TRUE', True),
84 84 ('T', True),
85 85 ('False', False),
86 86 ('F', False),
87 87 ('FALSE', False),
88 88 ('0', False),
89 89 ('-1', False),
90 90 ('', False)
91 91 ])
92 92 def test_str2bool(str_bool, expected):
93 93 from rhodecode.lib.utils2 import str2bool
94 94 assert str2bool(str_bool) == expected
95 95
96 96
97 97 @pytest.mark.parametrize("text, expected", reduce(lambda a1,a2:a1+a2, [
98 98 [
99 99 (pref+"", []),
100 100 (pref+"Hi there @marcink", ['marcink']),
101 101 (pref+"Hi there @marcink and @bob", ['bob', 'marcink']),
102 102 (pref+"Hi there @marcink\n", ['marcink']),
103 103 (pref+"Hi there @marcink and @bob\n", ['bob', 'marcink']),
104 104 (pref+"Hi there marcin@rhodecode.com", []),
105 105 (pref+"Hi there @john.malcovic and @bob\n", ['bob', 'john.malcovic']),
106 106 (pref+"This needs to be reviewed: (@marcink,@john)", ["john", "marcink"]),
107 107 (pref+"This needs to be reviewed: (@marcink, @john)", ["john", "marcink"]),
108 108 (pref+"This needs to be reviewed: [@marcink,@john]", ["john", "marcink"]),
109 109 (pref+"This needs to be reviewed: (@marcink @john)", ["john", "marcink"]),
110 110 (pref+"@john @mary, please review", ["john", "mary"]),
111 111 (pref+"@john,@mary, please review", ["john", "mary"]),
112 112 (pref+"Hej @123, @22john,@mary, please review", ['123', '22john', 'mary']),
113 113 (pref+"@first hi there @marcink here's my email marcin@email.com "
114 114 "@lukaszb check @one_more22 it pls @ ttwelve @D[] @one@two@three ", ['first', 'lukaszb', 'marcink', 'one', 'one_more22']),
115 115 (pref+"@MARCIN @maRCiN @2one_more22 @john please see this http://org.pl", ['2one_more22', 'john', 'MARCIN', 'maRCiN']),
116 116 (pref+"@marian.user just do it @marco-polo and next extract @marco_polo", ['marco-polo', 'marco_polo', 'marian.user']),
117 117 (pref+"user.dot hej ! not-needed maril@domain.org", []),
118 118 (pref+"\n@marcin", ['marcin']),
119 119 ]
120 120 for pref in ['', '\n', 'hi !', '\t', '\n\n']]), ids=no_newline_id_generator)
121 121 def test_mention_extractor(text, expected):
122 122 from rhodecode.lib.utils2 import extract_mentioned_users
123 123 got = extract_mentioned_users(text)
124 124 assert sorted(got, key=lambda x: x.lower()) == got
125 125 assert set(expected) == set(got)
126 126
127 127 @pytest.mark.parametrize("age_args, expected, kw", [
128 128 ({}, u'just now', {}),
129 129 ({'seconds': -1}, u'1 second ago', {}),
130 130 ({'seconds': -60 * 2}, u'2 minutes ago', {}),
131 131 ({'hours': -1}, u'1 hour ago', {}),
132 132 ({'hours': -24}, u'1 day ago', {}),
133 133 ({'hours': -24 * 5}, u'5 days ago', {}),
134 134 ({'months': -1}, u'1 month ago', {}),
135 135 ({'months': -1, 'days': -2}, u'1 month and 2 days ago', {}),
136 136 ({'years': -1, 'months': -1}, u'1 year and 1 month ago', {}),
137 137 ({}, u'just now', {'short_format': True}),
138 138 ({'seconds': -1}, u'1sec ago', {'short_format': True}),
139 139 ({'seconds': -60 * 2}, u'2min ago', {'short_format': True}),
140 140 ({'hours': -1}, u'1h ago', {'short_format': True}),
141 141 ({'hours': -24}, u'1d ago', {'short_format': True}),
142 142 ({'hours': -24 * 5}, u'5d ago', {'short_format': True}),
143 143 ({'months': -1}, u'1m ago', {'short_format': True}),
144 144 ({'months': -1, 'days': -2}, u'1m, 2d ago', {'short_format': True}),
145 145 ({'years': -1, 'months': -1}, u'1y, 1m ago', {'short_format': True}),
146 146 ])
147 147 def test_age(age_args, expected, kw, baseapp):
148 148 from rhodecode.lib.utils2 import age
149 149 from dateutil import relativedelta
150 150 n = datetime.datetime(year=2012, month=5, day=17)
151 151 delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs)
152 152
153 153 def translate(elem):
154 154 return elem.interpolate()
155 155
156 156 assert translate(age(n + delt(**age_args), now=n, **kw)) == expected
157 157
158 158
159 159 @pytest.mark.parametrize("age_args, expected, kw", [
160 160 ({}, u'just now', {}),
161 161 ({'seconds': 1}, u'in 1 second', {}),
162 162 ({'seconds': 60 * 2}, u'in 2 minutes', {}),
163 163 ({'hours': 1}, u'in 1 hour', {}),
164 164 ({'hours': 24}, u'in 1 day', {}),
165 165 ({'hours': 24 * 5}, u'in 5 days', {}),
166 166 ({'months': 1}, u'in 1 month', {}),
167 167 ({'months': 1, 'days': 1}, u'in 1 month and 1 day', {}),
168 168 ({'years': 1, 'months': 1}, u'in 1 year and 1 month', {}),
169 169 ({}, u'just now', {'short_format': True}),
170 170 ({'seconds': 1}, u'in 1sec', {'short_format': True}),
171 171 ({'seconds': 60 * 2}, u'in 2min', {'short_format': True}),
172 172 ({'hours': 1}, u'in 1h', {'short_format': True}),
173 173 ({'hours': 24}, u'in 1d', {'short_format': True}),
174 174 ({'hours': 24 * 5}, u'in 5d', {'short_format': True}),
175 175 ({'months': 1}, u'in 1m', {'short_format': True}),
176 176 ({'months': 1, 'days': 1}, u'in 1m, 1d', {'short_format': True}),
177 177 ({'years': 1, 'months': 1}, u'in 1y, 1m', {'short_format': True}),
178 178 ])
179 179 def test_age_in_future(age_args, expected, kw, baseapp):
180 180 from rhodecode.lib.utils2 import age
181 181 from dateutil import relativedelta
182 182 n = datetime.datetime(year=2012, month=5, day=17)
183 183 delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs)
184 184
185 185 def translate(elem):
186 186 return elem.interpolate()
187 187
188 188 assert translate(age(n + delt(**age_args), now=n, **kw)) == expected
189 189
190 190
191 191 @pytest.mark.parametrize("sample, expected_tags", [
192 192 # entry
193 193 ((
194 194 ""
195 195 ),
196 196 [
197 197
198 198 ]),
199 199 # entry
200 200 ((
201 201 "hello world [stale]"
202 202 ),
203 203 [
204 204 ('state', '[stale]'),
205 205 ]),
206 206 # entry
207 207 ((
208 208 "hello world [v2.0.0] [v1.0.0]"
209 209 ),
210 210 [
211 211 ('generic', '[v2.0.0]'),
212 212 ('generic', '[v1.0.0]'),
213 213 ]),
214 214 # entry
215 215 ((
216 216 "he[ll]o wo[rl]d"
217 217 ),
218 218 [
219 219 ('label', '[ll]'),
220 220 ('label', '[rl]'),
221 221 ]),
222 222 # entry
223 223 ((
224 224 "hello world [stale]\n[featured]\n[stale] [dead] [dev]"
225 225 ),
226 226 [
227 227 ('state', '[stale]'),
228 228 ('state', '[featured]'),
229 229 ('state', '[stale]'),
230 230 ('state', '[dead]'),
231 231 ('state', '[dev]'),
232 232 ]),
233 233 # entry
234 234 ((
235 235 "hello world \n\n [stale] \n [url =&gt; [name](http://rc.com)]"
236 236 ),
237 237 [
238 238 ('state', '[stale]'),
239 239 ('url', '[url =&gt; [name](http://rc.com)]'),
240 240 ]),
241 241 # entry
242 242 ((
243 243 "[url =&gt; [linkNameJS](javascript:alert(document.domain))]\n"
244 244 "[url =&gt; [linkNameHTTP](http://rhodecode.com)]\n"
245 245 "[url =&gt; [linkNameHTTPS](https://rhodecode.com)]\n"
246 246 "[url =&gt; [linkNamePath](/repo_group)]\n"
247 247 ),
248 248 [
249 249 ('generic', '[linkNameJS]'),
250 250 ('url', '[url =&gt; [linkNameHTTP](http://rhodecode.com)]'),
251 251 ('url', '[url =&gt; [linkNameHTTPS](https://rhodecode.com)]'),
252 252 ('url', '[url =&gt; [linkNamePath](/repo_group)]'),
253 253 ]),
254 254 # entry
255 255 ((
256 256 "hello pta[tag] gog [[]] [[] sda ero[or]d [me =&gt;>< sa]"
257 257 "[requires] [stale] [see<>=&gt;] [see =&gt; http://url.com]"
258 258 "[requires =&gt; url] [lang =&gt; python] [just a tag] "
259 259 "<html_tag first='abc' attr=\"my.url?attr=&another=\"></html_tag>"
260 260 "[,d] [ =&gt; ULR ] [obsolete] [desc]]"
261 261 ),
262 262 [
263 263 ('label', '[desc]'),
264 264 ('label', '[obsolete]'),
265 265 ('label', '[or]'),
266 266 ('label', '[requires]'),
267 267 ('label', '[tag]'),
268 268 ('state', '[stale]'),
269 269 ('lang', '[lang =&gt; python]'),
270 270 ('ref', '[requires =&gt; url]'),
271 271 ('see', '[see =&gt; http://url.com]'),
272 272
273 273 ]),
274 274
275 275 ], ids=no_newline_id_generator)
276 276 def test_metatag_extraction(sample, expected_tags):
277 277 from rhodecode.lib.helpers import extract_metatags
278 278 tags, value = extract_metatags(sample)
279 279 assert sorted(tags) == sorted(expected_tags)
280 280
281 281
282 282 @pytest.mark.parametrize("tag_data, expected_html", [
283 283
284 284 (('state', '[stable]'), '<div class="metatag" tag="state stable">stable</div>'),
285 285 (('state', '[stale]'), '<div class="metatag" tag="state stale">stale</div>'),
286 286 (('state', '[featured]'), '<div class="metatag" tag="state featured">featured</div>'),
287 287 (('state', '[dev]'), '<div class="metatag" tag="state dev">dev</div>'),
288 288 (('state', '[dead]'), '<div class="metatag" tag="state dead">dead</div>'),
289 289
290 290 (('label', '[personal]'), '<div class="metatag" tag="label">personal</div>'),
291 291 (('generic', '[v2.0.0]'), '<div class="metatag" tag="generic">v2.0.0</div>'),
292 292
293 293 (('lang', '[lang =&gt; JavaScript]'), '<div class="metatag" tag="lang">JavaScript</div>'),
294 294 (('lang', '[lang =&gt; C++]'), '<div class="metatag" tag="lang">C++</div>'),
295 295 (('lang', '[lang =&gt; C#]'), '<div class="metatag" tag="lang">C#</div>'),
296 296 (('lang', '[lang =&gt; Delphi/Object]'), '<div class="metatag" tag="lang">Delphi/Object</div>'),
297 297 (('lang', '[lang =&gt; Objective-C]'), '<div class="metatag" tag="lang">Objective-C</div>'),
298 298 (('lang', '[lang =&gt; .NET]'), '<div class="metatag" tag="lang">.NET</div>'),
299 299
300 300 (('license', '[license =&gt; BSD 3-clause]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/BSD 3-clause">BSD 3-clause</a></div>'),
301 301 (('license', '[license =&gt; GPLv3]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/GPLv3">GPLv3</a></div>'),
302 302 (('license', '[license =&gt; MIT]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/MIT">MIT</a></div>'),
303 303 (('license', '[license =&gt; AGPLv3]'), '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/AGPLv3">AGPLv3</a></div>'),
304 304
305 305 (('ref', '[requires =&gt; RepoName]'), '<div class="metatag" tag="ref requires">requires: <a href="/RepoName">RepoName</a></div>'),
306 306 (('ref', '[recommends =&gt; GroupName]'), '<div class="metatag" tag="ref recommends">recommends: <a href="/GroupName">GroupName</a></div>'),
307 307 (('ref', '[conflicts =&gt; SomeName]'), '<div class="metatag" tag="ref conflicts">conflicts: <a href="/SomeName">SomeName</a></div>'),
308 308 (('ref', '[base =&gt; SomeName]'), '<div class="metatag" tag="ref base">base: <a href="/SomeName">SomeName</a></div>'),
309 309
310 310 (('see', '[see =&gt; http://rhodecode.com]'), '<div class="metatag" tag="see">see: http://rhodecode.com </div>'),
311 311
312 312 (('url', '[url =&gt; [linkName](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">linkName</a> </div>'),
313 313 (('url', '[url =&gt; [example link](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">example link</a> </div>'),
314 314 (('url', '[url =&gt; [v1.0.0](https://rhodecode.com)]'), '<div class="metatag" tag="url"> <a href="https://rhodecode.com">v1.0.0</a> </div>'),
315 315
316 316 ])
317 317 def test_metatags_stylize(tag_data, expected_html):
318 318 from rhodecode.lib.helpers import style_metatag
319 319 tag_type,value = tag_data
320 320 assert style_metatag(tag_type, value) == expected_html
321 321
322 322
323 323 @pytest.mark.parametrize("tmpl_url, email, expected", [
324 324 ('http://test.com/{email}', 'test@foo.com', 'http://test.com/test@foo.com'),
325 325
326 326 ('http://test.com/{md5email}', 'test@foo.com', 'http://test.com/3cb7232fcc48743000cb86d0d5022bd9'),
327 327 ('http://test.com/{md5email}', 'testąć@foo.com', 'http://test.com/978debb907a3c55cd741872ab293ef30'),
328 328
329 329 ('http://testX.com/{md5email}?s={size}', 'test@foo.com', 'http://testX.com/3cb7232fcc48743000cb86d0d5022bd9?s=24'),
330 330 ('http://testX.com/{md5email}?s={size}', 'testąć@foo.com', 'http://testX.com/978debb907a3c55cd741872ab293ef30?s=24'),
331 331
332 332 ('{scheme}://{netloc}/{md5email}/{size}', 'test@foo.com', 'https://server.com/3cb7232fcc48743000cb86d0d5022bd9/24'),
333 333 ('{scheme}://{netloc}/{md5email}/{size}', 'testąć@foo.com', 'https://server.com/978debb907a3c55cd741872ab293ef30/24'),
334 334
335 335 ('http://test.com/{email}', 'testąć@foo.com', 'http://test.com/testąć@foo.com'),
336 336 ('http://test.com/{email}?size={size}', 'test@foo.com', 'http://test.com/test@foo.com?size=24'),
337 337 ('http://test.com/{email}?size={size}', 'testąć@foo.com', 'http://test.com/testąć@foo.com?size=24'),
338 338 ])
339 339 def test_gravatar_url_builder(tmpl_url, email, expected, request_stub):
340 340 from rhodecode.lib.helpers import gravatar_url
341 341
342 342 def fake_tmpl_context(_url):
343 343 _c = AttributeDict()
344 344 _c.visual = AttributeDict()
345 345 _c.visual.use_gravatar = True
346 346 _c.visual.gravatar_url = _url
347 347 return _c
348 348
349 349 # mock pyramid.threadlocals
350 350 def fake_get_current_request():
351 351 request_stub.scheme = 'https'
352 352 request_stub.host = 'server.com'
353 353
354 354 request_stub._call_context = fake_tmpl_context(tmpl_url)
355 355 return request_stub
356 356
357 357 with mock.patch('rhodecode.lib.helpers.get_current_request',
358 358 fake_get_current_request):
359 359
360 360 grav = gravatar_url(email_address=email, size=24)
361 361 assert grav == expected
362 362
363 363
364 364 @pytest.mark.parametrize(
365 365 "email, first_name, last_name, expected_initials, expected_color", [
366 366
367 367 ('test@rhodecode.com', '', '', 'TR', '#8a994d'),
368 368 ('marcin.kuzminski@rhodecode.com', '', '', 'MK', '#6559b3'),
369 369 # special cases of email
370 370 ('john.van.dam@rhodecode.com', '', '', 'JD', '#526600'),
371 371 ('Guido.van.Rossum@rhodecode.com', '', '', 'GR', '#990052'),
372 372 ('Guido.van.Rossum@rhodecode.com', 'Guido', 'Van Rossum', 'GR', '#990052'),
373 373
374 374 ('rhodecode+Guido.van.Rossum@rhodecode.com', '', '', 'RR', '#46598c'),
375 375 ('pclouds@rhodecode.com', 'Nguyễn Thái', 'Tgọc Duy', 'ND', '#665200'),
376 376
377 377 ('john-brown@foo.com', '', '', 'JF', '#73006b'),
378 378 ('admin@rhodecode.com', 'Marcin', 'Kuzminski', 'MK', '#104036'),
379 379 # partials
380 380 ('admin@rhodecode.com', 'Marcin', '', 'MR', '#104036'), # fn+email
381 381 ('admin@rhodecode.com', '', 'Kuzminski', 'AK', '#104036'), # em+ln
382 382 # non-ascii
383 383 ('admin@rhodecode.com', 'Marcin', 'Śuzminski', 'MS', '#104036'),
384 384 ('marcin.śuzminski@rhodecode.com', '', '', 'MS', '#73000f'),
385 385
386 386 # special cases, LDAP can provide those...
387 387 ('admin@', 'Marcin', 'Śuzminski', 'MS', '#aa00ff'),
388 388 ('marcin.śuzminski', '', '', 'MS', '#402020'),
389 389 ('null', '', '', 'NL', '#8c4646'),
390 390 ('some.@abc.com', 'some', '', 'SA', '#664e33')
391 391 ])
392 392 def test_initials_gravatar_pick_of_initials_and_color_algo(
393 393 email, first_name, last_name, expected_initials, expected_color):
394 394 instance = InitialsGravatar(email, first_name, last_name)
395 395 assert instance.get_initials() == expected_initials
396 396 assert instance.str2color(email) == expected_color
397 397
398 398
399 399 def test_initials_gravatar_mapping_algo():
400 400 pos = set()
401 401 instance = InitialsGravatar('', '', '')
402 402 iterations = 0
403 403
404 404 variations = []
405 405 for letter1 in string.ascii_letters:
406 406 for letter2 in string.ascii_letters[::-1][:10]:
407 407 for letter3 in string.ascii_letters[:10]:
408 408 variations.append(
409 409 '%s@rhodecode.com' % (letter1+letter2+letter3))
410 410
411 411 max_variations = 4096
412 412 for email in variations[:max_variations]:
413 413 iterations += 1
414 414 pos.add(
415 415 instance.pick_color_bank_index(email,
416 416 instance.get_color_bank()))
417 417
418 418 # we assume that we have match all 256 possible positions,
419 419 # in reasonable amount of different email addresses
420 420 assert len(pos) == 256
421 421 assert iterations == max_variations
422 422
423 423
424 424 @pytest.mark.parametrize("tmpl, repo_name, overrides, prefix, expected", [
425 425 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '', 'http://vps1:8000/group/repo1'),
426 426 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/group/repo1'),
427 427 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '/rc', 'http://vps1:8000/rc/group/repo1'),
428 428 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc', 'http://user@vps1:8000/rc/group/repo1'),
429 429 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc', 'http://marcink@vps1:8000/rc/group/repo1'),
430 430 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/rc/', 'http://user@vps1:8000/rc/group/repo1'),
431 431 (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'marcink'}, '/rc/', 'http://marcink@vps1:8000/rc/group/repo1'),
432 432 ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {}, '', 'http://vps1:8000/_23'),
433 433 ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'),
434 434 ('http://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://marcink@vps1:8000/_23'),
435 435 ('http://{netloc}/_{repoid}', 'group/repo1', {'user': 'marcink'}, '', 'http://vps1:8000/_23'),
436 436 ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://marcink@proxy1.server.com/group/repo1'),
437 437 ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {}, '', 'https://proxy1.server.com/group/repo1'),
438 438 ('https://proxy1.server.com/{user}/{repo}', 'group/repo1', {'user': 'marcink'}, '', 'https://proxy1.server.com/marcink/group/repo1'),
439 439 ])
440 440 def test_clone_url_generator(tmpl, repo_name, overrides, prefix, expected):
441 441 from rhodecode.lib.utils2 import get_clone_url
442 442
443 443 class RequestStub(object):
444 444 def request_url(self, name):
445 445 return 'http://vps1:8000' + prefix
446 446
447 447 def route_url(self, name):
448 448 return self.request_url(name)
449 449
450 450 clone_url = get_clone_url(
451 451 request=RequestStub(),
452 452 uri_tmpl=tmpl,
453 453 repo_name=repo_name, repo_id=23, **overrides)
454 454 assert clone_url == expected
455 455
456 456
457 457 def _quick_url(text, tmpl="""<a class="revision-link" href="%s">%s</a>""", url_=None):
458 458 """
459 459 Changes `some text url[foo]` => `some text <a href="/">foo</a>`
460 460
461 461 :param text: text containing url[...] markers to expand into links
462 462 """
463 463 import re
464 464 # quickly change expected url[] into a link
465 465 URL_PAT = re.compile(r'(?:url\[)(.+?)(?:\])')
466 466
467 467 def url_func(match_obj):
468 468 _url = match_obj.groups()[0]
469 469 return tmpl % (url_ or '/some-url', _url)
470 470 return URL_PAT.sub(url_func, text)
471 471
472 472
473 473 @pytest.mark.parametrize("sample, expected", [
474 474 ("",
475 475 ""),
476 476 ("git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68",
477 477 "git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68"),
478 478 ("from rev 000000000000",
479 479 "from rev url[000000000000]"),
480 480 ("from rev 000000000000123123 also rev 000000000000",
481 481 "from rev url[000000000000123123] also rev url[000000000000]"),
482 482 ("this should-000 00",
483 483 "this should-000 00"),
484 484 ("longtextffffffffff rev 123123123123",
485 485 "longtextffffffffff rev url[123123123123]"),
486 486 ("rev ffffffffffffffffffffffffffffffffffffffffffffffffff",
487 487 "rev ffffffffffffffffffffffffffffffffffffffffffffffffff"),
488 488 ("ffffffffffff some text traalaa",
489 489 "url[ffffffffffff] some text traalaa"),
490 490 ("""Multi line
491 491 123123123123
492 492 some text 123123123123
493 493 sometimes !
494 494 """,
495 495 """Multi line
496 496 url[123123123123]
497 497 some text url[123123123123]
498 498 sometimes !
499 499 """)
500 500 ], ids=no_newline_id_generator)
501 501 def test_urlify_commits(sample, expected):
502 502 def fake_url(self, *args, **kwargs):
503 503 return '/some-url'
504 504
505 505 expected = _quick_url(expected)
506 506
507 507 with mock.patch('rhodecode.lib.helpers.route_url', fake_url):
508 508 from rhodecode.lib.helpers import urlify_commits
509 509 assert urlify_commits(sample, 'repo_name') == expected
510 510
511 511
512 512 @pytest.mark.parametrize("sample, expected, url_", [
513 513 ("",
514 514 "",
515 515 ""),
516 516 ("https://svn.apache.org/repos",
517 517 "url[https://svn.apache.org/repos]",
518 518 "https://svn.apache.org/repos"),
519 519 ("http://svn.apache.org/repos",
520 520 "url[http://svn.apache.org/repos]",
521 521 "http://svn.apache.org/repos"),
522 522 ("from rev a also rev http://google.com",
523 523 "from rev a also rev url[http://google.com]",
524 524 "http://google.com"),
525 525 ("""Multi line
526 526 https://foo.bar.com
527 527 some text lalala""",
528 528 """Multi line
529 529 url[https://foo.bar.com]
530 530 some text lalala""",
531 531 "https://foo.bar.com")
532 532 ], ids=no_newline_id_generator)
533 533 def test_urlify_test(sample, expected, url_):
534 534 from rhodecode.lib.helpers import urlify_text
535 535 expected = _quick_url(expected, tmpl="""<a href="%s">%s</a>""", url_=url_)
536 536 assert urlify_text(sample) == expected
537 537
538 538
539 539 @pytest.mark.parametrize("test, expected", [
540 540 ("", None),
541 541 ("/_2", '2'),
542 542 ("_2", '2'),
543 543 ("/_2/", '2'),
544 544 ("_2/", '2'),
545 545
546 546 ("/_21", '21'),
547 547 ("_21", '21'),
548 548 ("/_21/", '21'),
549 549 ("_21/", '21'),
550 550
551 551 ("/_21/foobar", '21'),
552 552 ("_21/121", '21'),
553 553 ("/_21/_12", '21'),
554 554 ("_21/rc/foo", '21'),
555 555
556 556 ])
557 557 def test_get_repo_by_id(test, expected):
558 558 from rhodecode.model.repo import RepoModel
559 559 _test = RepoModel()._extract_id_from_repo_name(test)
560 560 assert _test == expected
561 561
562 562
563 563 def test_invalidation_context(baseapp):
564 564 repo_id = 999
565 565
566 566 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
567 567 repo_id, CacheKey.CACHE_TYPE_README)
568 568 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
569 569 repo_id=repo_id)
570 570 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
571 571
572 572 calls = [1, 2]
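# each genuine computation pops the next value, so the returned string shows
# whether the cached value was reused ('result:1') or recomputed ('result:2')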
573 573
574 574 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
575 575 def _dummy_func(cache_key):
576 576 val = calls.pop(0)
577 577 return 'result:{}'.format(val)
578 578
579 579 inv_context_manager = rc_cache.InvalidationContext(
580 580 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
581 581
582 582 # 1st call, fresh caches
583 583 with inv_context_manager as invalidation_context:
584 584 should_invalidate = invalidation_context.should_invalidate()
585 585 if should_invalidate:
586 _dummy_func.invalidate('some-key')
587 result = _dummy_func('some-key')
586 result = _dummy_func.refresh('some-key')
587 else:
588 result = _dummy_func('some-key')
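# refresh() (dogpile.cache-style) bypasses the cached value, recomputes it and
# writes the fresh result back, instead of just dropping the key via invalidate()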
588 589
589 590 assert isinstance(invalidation_context, rc_cache.FreshRegionCache)
590 591 assert should_invalidate is True
591 592
592 593 assert 'result:1' == result
593 594 # should be cached, so calling it twice will give the same result!
594 595 result = _dummy_func('some-key')
595 596 assert 'result:1' == result
596 597
597 598 # 2nd call, we create a new context manager; it should now be key aware and
598 599 # return an active cache region
599 600 with inv_context_manager as invalidation_context:
600 601 should_invalidate = invalidation_context.should_invalidate()
601 602 assert isinstance(invalidation_context, rc_cache.ActiveRegionCache)
602 603 assert should_invalidate is False
603 604
604 605 # Mark invalidation
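# this flags every cache key registered under the namespace, so the next
# InvalidationContext will report should_invalidate() == True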
605 606 CacheKey.set_invalidate(invalidation_namespace)
606 607
607 608 # 3rd call, fresh caches
608 609 with inv_context_manager as invalidation_context:
609 610 should_invalidate = invalidation_context.should_invalidate()
610 611 if should_invalidate:
611 _dummy_func.invalidate('some-key')
612 result = _dummy_func('some-key')
612 result = _dummy_func.refresh('some-key')
613 else:
614 result = _dummy_func('some-key')
613 615
614 616 assert isinstance(invalidation_context, rc_cache.FreshRegionCache)
615 617 assert should_invalidate is True
616 618
617 619 assert 'result:2' == result
618 620
619 621 # cached again, same result
620 622 result = _dummy_func('some-key')
621 623 assert 'result:2' == result
622 624
623 625
624 626 def test_invalidation_context_exception_in_compute(baseapp):
625 627 repo_id = 888
626 628
627 629 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
628 630 repo_id, CacheKey.CACHE_TYPE_README)
629 631 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
630 632 repo_id=repo_id)
631 633 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
632 634
633 635 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
634 636 def _dummy_func(cache_key):
635 637 raise Exception('Error in cache func')
636 638
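# the exception raised inside the cached function must propagate through both
# the refresh() path and the plain call path instead of being swallowed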
637 639 with pytest.raises(Exception):
638 640 inv_context_manager = rc_cache.InvalidationContext(
639 641 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
640 642
641 643 # 1st call, fresh caches
642 644 with inv_context_manager as invalidation_context:
643 645 should_invalidate = invalidation_context.should_invalidate()
644 646 if should_invalidate:
645 _dummy_func.invalidate('some-key-2')
646 _dummy_func('some-key-2')
647 _dummy_func.refresh('some-key-2')
648 else:
649 _dummy_func('some-key-2')
647 650
648 651
649 652 @pytest.mark.parametrize('execution_number', range(5))
650 653 def test_cache_invalidation_race_condition(execution_number, baseapp):
651 654 import time
652 655
653 656 repo_id = 777
654 657
655 658 cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
656 659 repo_id, CacheKey.CACHE_TYPE_README)
657 660 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
658 661 repo_id=repo_id)
659 662 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
660 663
661 664 @run_test_concurrently(25)
662 665 def test_create_and_delete_cache_keys():
663 666 time.sleep(0.2)
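# the sleep gives all 25 concurrent runs (see @run_test_concurrently) time to
# start, so they hit the cache region at roughly the same time and race on the same key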
664 667
665 668 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
666 669 def _dummy_func(cache_key):
667 670 val = 'async'
668 671 return 'result:{}'.format(val)
669 672
670 673 inv_context_manager = rc_cache.InvalidationContext(
671 674 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
672 675
673 676 # 1st call, fresh caches
674 677 with inv_context_manager as invalidation_context:
675 678 should_invalidate = invalidation_context.should_invalidate()
676 679 if should_invalidate:
677 _dummy_func.invalidate('some-key-3')
678 _dummy_func('some-key-3')
680 _dummy_func.refresh('some-key-3')
681 else:
682 _dummy_func('some-key-3')
679 683
680 684 # Mark invalidation
681 685 CacheKey.set_invalidate(invalidation_namespace)
682 686
683 687 test_create_and_delete_cache_keys()