##// END OF EJS Templates
caches: store computation time inside context manager as helper. Since the with block is full...
marcink -
r2936:776b3361 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,244 +1,242 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2017-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import time
21 20 import pytz
22 21 import logging
23 22
24 23 from pyramid.view import view_config
25 24 from pyramid.response import Response
26 25 from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed
27 26
28 27 from rhodecode.apps._base import RepoAppView
29 28 from rhodecode.lib import audit_logger
30 29 from rhodecode.lib import rc_cache
31 30 from rhodecode.lib import helpers as h
32 31 from rhodecode.lib.auth import (
33 32 LoginRequired, HasRepoPermissionAnyDecorator)
34 33 from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer
35 34 from rhodecode.lib.utils2 import str2bool, safe_int, md5_safe
36 35 from rhodecode.model.db import UserApiKeys, CacheKey
37 36
38 37 log = logging.getLogger(__name__)
39 38
40 39
class RepoFeedView(RepoAppView):
    """Serve the per-repository ATOM and RSS feeds.

    Feed bodies are expensive to build (diff parsing + template rendering),
    so the generated payload is stored in the long-term repo cache region and
    recomputed only on an invalidation signal.
    """

    def load_default_context(self):
        c = self._get_local_tmpl_context()
        self._load_defaults()
        return c

    def _get_config(self):
        """Collect feed settings from the global rhodecode configuration."""
        import rhodecode
        config = rhodecode.CONFIG

        return {
            'language': 'en-us',
            'feed_ttl': '5',  # TTL of feed,
            'feed_include_diff':
                str2bool(config.get('rss_include_diff', False)),
            'feed_items_per_page':
                safe_int(config.get('rss_items_per_page', 20)),
            'feed_diff_limit':
                # we need to protect from parsing huge diffs here other way
                # we can kill the server
                safe_int(config.get('rss_cut_off_limit', 32 * 1024)),
        }

    def _load_defaults(self):
        """Cache common feed values on the view instance."""
        _ = self.request.translate
        config = self._get_config()
        # common values for feeds
        self.description = _('Changes on %s repository')
        # FIX: was a redundant double assignment (`self.title = self.title = ...`)
        self.title = _('%s %s feed') % (self.db_repo_name, '%s')
        self.language = config["language"]
        self.ttl = config["feed_ttl"]
        self.feed_include_diff = config['feed_include_diff']
        self.feed_diff_limit = config['feed_diff_limit']
        self.feed_items_per_page = config['feed_items_per_page']

    def _changes(self, commit):
        """Parse the commit diff, limited to `feed_diff_limit` bytes."""
        diff_processor = DiffProcessor(
            commit.diff(), diff_limit=self.feed_diff_limit)
        _parsed = diff_processor.prepare(inline_diff=False)
        # a LimitedDiffContainer result means the diff was cut off at the limit
        limited_diff = isinstance(_parsed, LimitedDiffContainer)

        return diff_processor, _parsed, limited_diff

    def _get_title(self, commit):
        return h.shorter(commit.message, 160)

    def _get_description(self, commit):
        """Render the feed entry body for one commit via the atom template."""
        _renderer = self.request.get_partial_renderer(
            'rhodecode:templates/feed/atom_feed_entry.mako')
        diff_processor, parsed_diff, limited_diff = self._changes(commit)
        filtered_parsed_diff, has_hidden_changes = self.path_filter.filter_patchset(parsed_diff)
        return _renderer(
            'body',
            commit=commit,
            parsed_diff=filtered_parsed_diff,
            limited_diff=limited_diff,
            feed_include_diff=self.feed_include_diff,
            diff_processor=diff_processor,
            has_hidden_changes=has_hidden_changes
        )

    def _set_timezone(self, date, tzinfo=pytz.utc):
        """Return `date` as timezone-aware; naive dates get `tzinfo` attached.

        FIX: `datetime.replace` returns a *new* datetime; the original code
        discarded that result and always returned the naive date unchanged.
        """
        if not getattr(date, "tzinfo", None):
            date = date.replace(tzinfo=tzinfo)
        return date

    def _get_commits(self):
        # newest `feed_items_per_page` commits of the repository
        return list(self.rhodecode_vcs_repo[-self.feed_items_per_page:])

    def uid(self, repo_id, commit_id):
        """Stable unique feed-entry id derived from repo and commit hashes."""
        return '{}:{}'.format(md5_safe(repo_id), md5_safe(commit_id))

    @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='atom_feed_home', request_method='GET',
        renderer=None)
    def atom(self):
        """
        Produce an atom-1.0 feed via feedgenerator module
        """
        self.load_default_context()

        cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
            self.db_repo.repo_id, CacheKey.CACHE_TYPE_FEED)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.db_repo.repo_id)

        region = rc_cache.get_or_create_region('cache_repo_longterm',
                                               cache_namespace_uid)

        # caching is bypassed when a path-permission filter is active, since
        # the filtered feed content is user-specific
        condition = not self.path_filter.is_enabled

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
                                               condition=condition)
        def generate_atom_feed(repo_id, _repo_name, _feed_type):
            feed = Atom1Feed(
                title=self.title % _repo_name,
                link=h.route_url('repo_summary', repo_name=_repo_name),
                description=self.description % _repo_name,
                language=self.language,
                ttl=self.ttl
            )

            for commit in reversed(self._get_commits()):
                date = self._set_timezone(commit.date)
                feed.add_item(
                    unique_id=self.uid(repo_id, commit.raw_id),
                    title=self._get_title(commit),
                    author_name=commit.author,
                    description=self._get_description(commit),
                    link=h.route_url(
                        'repo_commit', repo_name=_repo_name,
                        commit_id=commit.raw_id),
                    pubdate=date,)

            return feed.mime_type, feed.writeString('utf-8')

        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
        with inv_context_manager as invalidation_context:
            # check for stored invalidation signal, and maybe purge the cache
            # before computing it again
            if invalidation_context.should_invalidate():
                generate_atom_feed.invalidate(
                    self.db_repo.repo_id, self.db_repo.repo_name, 'atom')

            mime_type, feed = generate_atom_feed(
                self.db_repo.repo_id, self.db_repo.repo_name, 'atom')

        log.debug('Repo ATOM feed computed in %.3fs',
                  inv_context_manager.compute_time)

        response = Response(feed)
        response.content_type = mime_type
        return response

    @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='rss_feed_home', request_method='GET',
        renderer=None)
    def rss(self):
        """
        Produce an rss2 feed via feedgenerator module
        """
        self.load_default_context()

        cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
            self.db_repo.repo_id, CacheKey.CACHE_TYPE_FEED)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.db_repo.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm',
                                               cache_namespace_uid)

        # see `atom` above for why caching is conditional
        condition = not self.path_filter.is_enabled

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
                                               condition=condition)
        def generate_rss_feed(repo_id, _repo_name, _feed_type):
            feed = Rss201rev2Feed(
                title=self.title % _repo_name,
                link=h.route_url('repo_summary', repo_name=_repo_name),
                description=self.description % _repo_name,
                language=self.language,
                ttl=self.ttl
            )

            for commit in reversed(self._get_commits()):
                date = self._set_timezone(commit.date)
                feed.add_item(
                    unique_id=self.uid(repo_id, commit.raw_id),
                    title=self._get_title(commit),
                    author_name=commit.author,
                    description=self._get_description(commit),
                    link=h.route_url(
                        'repo_commit', repo_name=_repo_name,
                        commit_id=commit.raw_id),
                    pubdate=date,)

            return feed.mime_type, feed.writeString('utf-8')

        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
        with inv_context_manager as invalidation_context:
            # check for stored invalidation signal, and maybe purge the cache
            # before computing it again
            if invalidation_context.should_invalidate():
                generate_rss_feed.invalidate(
                    self.db_repo.repo_id, self.db_repo.repo_name, 'rss')

            mime_type, feed = generate_rss_feed(
                self.db_repo.repo_id, self.db_repo.repo_name, 'rss')

        log.debug(
            'Repo RSS feed computed in %.3fs', inv_context_manager.compute_time)

        response = Response(feed)
        response.content_type = mime_type
        return response
@@ -1,392 +1,392 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 import time
22 21 import logging
23 22 import string
24 23 import rhodecode
25 24
26 25 from pyramid.view import view_config
27 26
28 27 from rhodecode.controllers import utils
29 28 from rhodecode.apps._base import RepoAppView
30 29 from rhodecode.config.conf import (LANGUAGES_EXTENSIONS_MAP)
31 30 from rhodecode.lib import helpers as h, rc_cache
32 31 from rhodecode.lib.utils2 import safe_str, safe_int
33 32 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
34 33 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
35 34 from rhodecode.lib.ext_json import json
36 35 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 36 from rhodecode.lib.vcs.exceptions import (
38 37 CommitError, EmptyRepositoryError, CommitDoesNotExistError)
39 38 from rhodecode.model.db import Statistics, CacheKey, User
40 39 from rhodecode.model.meta import Session
41 40 from rhodecode.model.repo import ReadmeFinder
42 41 from rhodecode.model.scm import ScmModel
43 42
44 43 log = logging.getLogger(__name__)
45 44
46 45
class RepoSummaryView(RepoAppView):
    # Renders the repository summary page (readme, clone URLs, statistics)
    # and the JSON endpoints backing its widgets (stats, refs).

    def load_default_context(self):
        # Expose the vcs repo instance to templates only when the on-disk
        # requirements are satisfied; `summary` falls back to a dedicated
        # "missing requirements" template otherwise.
        c = self._get_local_tmpl_context(include_app_defaults=True)
        c.rhodecode_repo = None
        if not c.repository_requirements_missing:
            c.rhodecode_repo = self.rhodecode_vcs_repo
        return c

    def _get_readme_data(self, db_repo, renderer_type):
        # Locate and render the repository README, cached in the long-term
        # per-repo cache region; the cache is purged when the shared
        # invalidation namespace signals a change.

        log.debug('Looking for README file')

        cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
            db_repo.repo_id, CacheKey.CACHE_TYPE_README)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.db_repo.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def generate_repo_readme(repo_id, _repo_name, _renderer_type):
            # Returns a (rendered_html_or_None, readme_filename_or_None) pair.
            readme_data = None
            readme_node = None
            readme_filename = None
            commit = self._get_landing_commit_or_none(db_repo)
            if commit:
                log.debug("Searching for a README file.")
                readme_node = ReadmeFinder(_renderer_type).search(commit)
                if readme_node:
                    # URLs used to rewrite relative links inside the README
                    relative_urls = {
                        'raw': h.route_path(
                            'repo_file_raw', repo_name=_repo_name,
                            commit_id=commit.raw_id, f_path=readme_node.path),
                        'standard': h.route_path(
                            'repo_files', repo_name=_repo_name,
                            commit_id=commit.raw_id, f_path=readme_node.path),
                    }
                    readme_data = self._render_readme_or_none(
                        commit, readme_node, relative_urls)
                    readme_filename = readme_node.path
            return readme_data, readme_filename

        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
        with inv_context_manager as invalidation_context:
            # check for stored invalidation signal, and maybe purge the cache
            # before computing it again
            if invalidation_context.should_invalidate():
                generate_repo_readme.invalidate(
                    db_repo.repo_id, db_repo.repo_name, renderer_type)

            instance = generate_repo_readme(
                db_repo.repo_id, db_repo.repo_name, renderer_type)

        # compute_time is measured inside the context manager itself
        log.debug(
            'Repo readme generated and computed in %.3fs',
            inv_context_manager.compute_time)
        return instance

    def _get_landing_commit_or_none(self, db_repo):
        # Returns the configured landing commit, or None when the repository
        # is empty or the commit lookup fails.
        log.debug("Getting the landing commit.")
        try:
            commit = db_repo.get_landing_commit()
            if not isinstance(commit, EmptyCommit):
                return commit
            else:
                log.debug("Repository is empty, no README to render.")
        except CommitError:
            log.exception(
                "Problem getting commit when trying to render the README.")

    def _render_readme_or_none(self, commit, readme_node, relative_urls):
        # Render the README file to HTML; returns None if rendering raises.
        log.debug(
            'Found README file `%s` rendering...', readme_node.path)
        renderer = MarkupRenderer()
        try:
            html_source = renderer.render(
                readme_node.content, filename=readme_node.path)
            if relative_urls:
                # rewrite relative links to point at repo file routes
                return relative_links(html_source, relative_urls)
            return html_source
        except Exception:
            log.exception(
                "Exception while trying to render the README")

    def _load_commits_context(self, c):
        # Populate template context with a paginated commit listing plus the
        # comments/statuses attached to the visible page of commits.
        p = safe_int(self.request.GET.get('page'), 1)
        size = safe_int(self.request.GET.get('size'), 10)

        def url_generator(**kw):
            # pagination links preserve the page size
            query_params = {
                'size': size
            }
            query_params.update(kw)
            return h.route_path(
                'repo_summary_commits',
                repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)

        pre_load = ['author', 'branch', 'date', 'message']
        try:
            collection = self.rhodecode_vcs_repo.get_commits(pre_load=pre_load)
        except EmptyRepositoryError:
            # empty repo: paginate over the (empty) repo object itself
            collection = self.rhodecode_vcs_repo

        c.repo_commits = h.RepoPage(
            collection, page=p, items_per_page=size, url=url_generator)
        page_ids = [x.raw_id for x in c.repo_commits]
        c.comments = self.db_repo.get_comments(page_ids)
        c.statuses = self.db_repo.statuses(page_ids)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_summary_commits', request_method='GET',
        renderer='rhodecode:templates/summary/summary_commits.mako')
    def summary_commits(self):
        # Standalone endpoint for the summary page's commit list partial.
        c = self.load_default_context()
        self._load_commits_context(c)
        return self._get_template_context(c)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_summary', request_method='GET',
        renderer='rhodecode:templates/summary/summary.mako')
    @view_config(
        route_name='repo_summary_slash', request_method='GET',
        renderer='rhodecode:templates/summary/summary.mako')
    @view_config(
        route_name='repo_summary_explicit', request_method='GET',
        renderer='rhodecode:templates/summary/summary.mako')
    def summary(self):
        # Main summary page: clone URLs, language statistics, follower/fork
        # counts, README and the first page of commits.
        c = self.load_default_context()

        # Prepare the clone URL
        username = ''
        if self._rhodecode_user.username != User.DEFAULT_USER:
            username = safe_str(self._rhodecode_user.username)

        _def_clone_uri = _def_clone_uri_id = c.clone_uri_tmpl
        _def_clone_uri_ssh = c.clone_uri_ssh_tmpl

        # build the alternate clone template keyed by repo id
        # NOTE(review): the elif branch replaces '_{repoid}' with '{repo}',
        # which looks inverted relative to the `_id` variable name — confirm
        # the intended behavior before changing.
        if '{repo}' in _def_clone_uri:
            _def_clone_uri_id = _def_clone_uri.replace(
                '{repo}', '_{repoid}')
        elif '{repoid}' in _def_clone_uri:
            _def_clone_uri_id = _def_clone_uri.replace(
                '_{repoid}', '{repo}')

        c.clone_repo_url = self.db_repo.clone_url(
            user=username, uri_tmpl=_def_clone_uri)
        c.clone_repo_url_id = self.db_repo.clone_url(
            user=username, uri_tmpl=_def_clone_uri_id)
        c.clone_repo_url_ssh = self.db_repo.clone_url(
            uri_tmpl=_def_clone_uri_ssh, ssh=True)

        # If enabled, get statistics data

        c.show_stats = bool(self.db_repo.enable_statistics)

        stats = Session().query(Statistics) \
            .filter(Statistics.repository == self.db_repo) \
            .scalar()

        c.stats_percentage = 0

        if stats and stats.languages:
            # `no_data` is True only when statistics are disabled
            c.no_data = False is self.db_repo.enable_statistics
            lang_stats_d = json.loads(stats.languages)

            # Sort first by decreasing count and second by the file extension,
            # so we have a consistent output.
            lang_stats_items = sorted(lang_stats_d.iteritems(),
                                      key=lambda k: (-k[1], k[0]))[:10]
            lang_stats = [(x, {"count": y,
                               "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
                          for x, y in lang_stats_items]

            c.trending_languages = json.dumps(lang_stats)
        else:
            c.no_data = True
            c.trending_languages = json.dumps({})

        scm_model = ScmModel()
        c.enable_downloads = self.db_repo.enable_downloads
        c.repository_followers = scm_model.get_followers(self.db_repo)
        c.repository_forks = scm_model.get_forks(self.db_repo)
        c.repository_is_user_following = scm_model.is_following_repo(
            self.db_repo_name, self._rhodecode_user.user_id)

        # first interaction with the VCS instance after here...
        if c.repository_requirements_missing:
            self.request.override_renderer = \
                'rhodecode:templates/summary/missing_requirements.mako'
            return self._get_template_context(c)

        c.readme_data, c.readme_file = \
            self._get_readme_data(self.db_repo, c.visual.default_renderer)

        # loads the summary commits template context
        self._load_commits_context(c)

        return self._get_template_context(c)

    def get_request_commit_id(self):
        return self.request.matchdict['commit_id']

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_stats', request_method='GET',
        renderer='json_ext')
    def repo_stats(self):
        # JSON endpoint: total repo size plus per-extension file counts,
        # cached per (repo_id, commit_id, show_stats) when TTL > 0.
        commit_id = self.get_request_commit_id()
        show_stats = bool(self.db_repo.enable_statistics)
        repo_id = self.db_repo.repo_id

        cache_seconds = safe_int(
            rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
        cache_on = cache_seconds > 0
        log.debug(
            'Computing REPO TREE for repo_id %s commit_id `%s` '
            'with caching: %s[TTL: %ss]' % (
                repo_id, commit_id, cache_on, cache_seconds or 0))

        cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
        region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
                                               condition=cache_on)
        def compute_stats(repo_id, commit_id, show_stats):
            code_stats = {}
            size = 0
            try:
                scm_instance = self.db_repo.scm_instance()
                commit = scm_instance.get_commit(commit_id)

                for node in commit.get_filenodes_generator():
                    size += node.size
                    if not show_stats:
                        # size is always accumulated; per-extension stats
                        # only when statistics are enabled
                        continue
                    ext = string.lower(node.extension)
                    ext_info = LANGUAGES_EXTENSIONS_MAP.get(ext)
                    if ext_info:
                        if ext in code_stats:
                            code_stats[ext]['count'] += 1
                        else:
                            code_stats[ext] = {"count": 1, "desc": ext_info}
            except (EmptyRepositoryError, CommitDoesNotExistError):
                # empty/missing commit: report zero size, no stats
                pass
            return {'size': h.format_byte_size_binary(size),
                    'code_stats': code_stats}

        stats = compute_stats(self.db_repo.repo_id, commit_id, show_stats)
        return stats

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_refs_data', request_method='GET',
        renderer='json_ext')
    def repo_refs_data(self):
        # JSON endpoint: branches/tags/bookmarks for the refs selector widget.
        _ = self.request.translate
        self.load_default_context()

        repo = self.rhodecode_vcs_repo
        refs_to_create = [
            (_("Branch"), repo.branches, 'branch'),
            (_("Tag"), repo.tags, 'tag'),
            (_("Bookmark"), repo.bookmarks, 'book'),
        ]
        res = self._create_reference_data(
            repo, self.db_repo_name, refs_to_create)
        data = {
            'more': False,
            'results': res
        }
        return data

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_refs_changelog_data', request_method='GET',
        renderer='json_ext')
    def repo_refs_changelog_data(self):
        # JSON endpoint: branch refs for the changelog filter widget.
        _ = self.request.translate
        self.load_default_context()

        repo = self.rhodecode_vcs_repo

        refs_to_create = [
            (_("Branches"), repo.branches, 'branch'),
            (_("Closed branches"), repo.branches_closed, 'branch_closed'),
            # TODO: enable when vcs can handle bookmarks filters
            # (_("Bookmarks"), repo.bookmarks, "book"),
        ]
        res = self._create_reference_data(
            repo, self.db_repo_name, refs_to_create)
        data = {
            'more': False,
            'results': res
        }
        return data

    def _create_reference_data(self, repo, full_repo_name, refs_to_create):
        # Build select2-style grouped reference data; empty groups are
        # omitted entirely.
        format_ref_id = utils.get_format_ref_id(repo)

        result = []
        for title, refs, ref_type in refs_to_create:
            if refs:
                result.append({
                    'text': title,
                    'children': self._create_reference_items(
                        repo, full_repo_name, refs, ref_type,
                        format_ref_id),
                })
        return result

    def _create_reference_items(self, repo, full_repo_name, refs, ref_type,
                                format_ref_id):
        # One entry per ref, each carrying a browse ("files") URL.
        result = []
        is_svn = h.is_svn(repo)
        for ref_name, raw_id in refs.iteritems():
            files_url = self._create_files_url(
                repo, full_repo_name, ref_name, raw_id, is_svn)
            result.append({
                'text': ref_name,
                'id': format_ref_id(ref_name, raw_id),
                'raw_id': raw_id,
                'type': ref_type,
                'files_url': files_url,
            })
        return result

    def _create_files_url(self, repo, full_repo_name, ref_name, raw_id, is_svn):
        # SVN refs are paths, and ref names containing '/' are ambiguous in
        # URLs, so in those cases link by raw commit id instead of ref name.
        use_commit_id = '/' in ref_name or is_svn
        return h.route_path(
            'repo_files',
            repo_name=full_repo_name,
            f_path=ref_name if is_svn else '',
            commit_id=raw_id if use_commit_id else ref_name,
            _query=dict(at=ref_name))
@@ -1,318 +1,319 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import os
21 import time
21 22 import logging
22 23 import functools
23 24 import threading
24 25
25 26 from dogpile.cache import CacheRegion
26 27 from dogpile.cache.util import compat
27 28
28 29 import rhodecode
29 30 from rhodecode.lib.utils import safe_str, sha1
30 31 from rhodecode.lib.utils2 import safe_unicode, str2bool
31 32 from rhodecode.model.db import Session, CacheKey, IntegrityError
32 33
33 34 from . import region_meta
34 35
35 36 log = logging.getLogger(__name__)
36 37
37 38
class RhodeCodeCacheRegion(CacheRegion):
    # dogpile CacheRegion subclass adding a `condition` switch that bypasses
    # the cache machinery entirely when caching is not wanted.

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=compat.string_type,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator, that will not touch any dogpile internals
        if `condition` isn't met. This works a bit differently than
        `should_cache_fn`, and it's faster in cases where we don't ever want to
        compute cached values.
        """
        # expiration_time may be a callable producing the TTL lazily
        expiration_time_is_callable = compat.callable(expiration_time)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def decorator(fn):
            if to_str is compat.string_type:
                # backwards compatible
                key_generator = function_key_generator(namespace, fn)
            else:
                key_generator = function_key_generator(namespace, fn, to_str=to_str)

            @functools.wraps(fn)
            def decorate(*arg, **kw):
                key = key_generator(*arg, **kw)

                @functools.wraps(fn)
                def creator():
                    return fn(*arg, **kw)

                # fast path: caching disabled, just compute directly
                if not condition:
                    return creator()

                timeout = expiration_time() if expiration_time_is_callable \
                    else expiration_time

                return self.get_or_create(key, creator, timeout, should_cache_fn)

            # helper functions exposed on the decorated callable, mirroring
            # dogpile's standard cache_on_arguments API

            def invalidate(*arg, **kw):
                # drop the cached value for these args
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                # store `value` for these args without calling fn
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                # fetch the cached value (or dogpile's NO_VALUE) without computing
                key = key_generator(*arg, **kw)
                return self.get(key)

            def refresh(*arg, **kw):
                # recompute unconditionally and update the cache
                key = key_generator(*arg, **kw)
                value = fn(*arg, **kw)
                self.set(key, value)
                return value

            decorate.set = set_
            decorate.invalidate = invalidate
            decorate.refresh = refresh
            decorate.get = get
            decorate.original = fn
            decorate.key_generator = key_generator

            return decorate

        return decorator
108 109
109 110
def make_region(*arg, **kw):
    """Factory for the project-specific dogpile cache region class."""
    region = RhodeCodeCacheRegion(*arg, **kw)
    return region
112 113
113 114
def get_default_cache_settings(settings, prefixes=None):
    """Extract settings whose keys begin with one of the given prefixes.

    The matched prefix is stripped from the key; string values are stripped
    of surrounding whitespace. Returns a new dict.
    """
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if not key.startswith(prefix):
                continue
            stripped_name = key.split(prefix)[1].strip()
            value = settings[key]
            if isinstance(value, basestring):
                value = value.strip()
            cache_settings[stripped_name] = value
    return cache_settings
126 127
127 128
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    joined_params = "_".join(safe_str(arg) for arg in args)
    return sha1(joined_params)
133 134
134 135
def key_generator(namespace, fn):
    """Return a key function producing `<namespace>:<fname>_<params-hash>`."""
    fname = fn.__name__

    def generate_key(*args):
        # fall back to the 'default' namespace when none was configured
        namespace_pref = namespace or 'default'
        return "{}:{}_{}".format(
            namespace_pref, fname, compute_key_from_params(*args))

    return generate_key
146 147
147 148
def get_or_create_region(region_name, region_namespace=None):
    # Resolve a configured dogpile region by name. For file-backed regions a
    # dedicated region (one dbm file per namespace) is created on demand and
    # memoized in the shared `region_meta.dogpile_cache_regions` registry.
    from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
    region_obj = region_meta.dogpile_cache_regions.get(region_name)
    if not region_obj:
        raise EnvironmentError(
            'Region `{}` not in configured: {}.'.format(
                region_name, region_meta.dogpile_cache_regions.keys()))

    region_uid_name = '{}:{}'.format(region_name, region_namespace)
    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        # NOTE(review): the registry lookup/store below is keyed by
        # `region_namespace` alone (not `region_uid_name`), so the same
        # namespace under two different regions would collide — confirm
        # this is intended.
        region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
        if region_exist:
            log.debug('Using already configured region: %s', region_namespace)
            return region_exist
        cache_dir = region_meta.dogpile_config_defaults['cache_dir']
        expiration_time = region_obj.expiration_time

        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)
        new_region = make_region(
            name=region_uid_name, function_key_generator=key_generator
        )
        namespace_filename = os.path.join(
            cache_dir, "{}.cache.dbm".format(region_namespace))
        # special type that allows 1db per namespace
        new_region.configure(
            backend='dogpile.cache.rc.file_namespace',
            expiration_time=expiration_time,
            arguments={"filename": namespace_filename}
        )

        # create and save in region caches
        log.debug('configuring new region: %s',region_uid_name)
        region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

    return region_obj
184 185
185 186
def clear_cache_namespace(cache_region, cache_namespace_uid):
    """Delete every cached key under `cache_namespace_uid`; return the count."""
    region = get_or_create_region(cache_region, cache_namespace_uid)
    namespace_keys = region.backend.list_keys(prefix=cache_namespace_uid)
    region.delete_multi(namespace_keys)
    return len(namespace_keys)
191 192
192 193
class ActiveRegionCache(object):
    """Invalidation-state object meaning the cached data is still valid."""

    def __init__(self, context):
        self.context = context

    def should_invalidate(self):
        """An active cache never requests re-computation."""
        return False
199 200
200 201
201 202 class FreshRegionCache(object):
202 203 def __init__(self, context):
203 204 self.context = context
204 205
205 206 def should_invalidate(self):
206 207 return True
207 208
208 209
209 210 class InvalidationContext(object):
210 211 """
211 212 usage::
212 213
213 import time
214 214 from rhodecode.lib import rc_cache
215 my_id = 1
216 cache_namespace_uid = 'cache_demo.{}'.format(my_id)
217 invalidation_namespace = 'repo_cache:1'
215
216 cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
218 217 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
219 218
220 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
221 expiration_time=30,
222 condition=True)
219 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
223 220 def heavy_compute(cache_name, param1, param2):
224 221 print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
225 import time
226 time.sleep(30)
227 return True
228 222
229 start = time.time()
223 # invalidation namespace is shared namespace key for all process caches
224 # we use it to send a global signal
225 invalidation_namespace = 'repo_cache:1'
226
230 227 inv_context_manager = rc_cache.InvalidationContext(
231 228 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
232 229 with inv_context_manager as invalidation_context:
233 230 # check for stored invalidation signal, and maybe purge the cache
234 231 # before computing it again
235 232 if invalidation_context.should_invalidate():
236 233 heavy_compute.invalidate('some_name', 'param1', 'param2')
237 234
238 235 result = heavy_compute('some_name', 'param1', 'param2')
239 compute_time = time.time() - start
236 compute_time = inv_context_manager.compute_time
240 237 print(compute_time)
241 238
242 239 # To send global invalidation signal, simply run
243 240 CacheKey.set_invalidate(invalidation_namespace)
244 241
245 242 """
246 243
247 244 def __repr__(self):
248 245 return '<InvalidationContext:{}[{}]>'.format(
249 246 safe_str(self.cache_key), safe_str(self.uid))
250 247
251 248 def __init__(self, uid, invalidation_namespace='',
252 249 raise_exception=False, thread_scoped=None):
253 250 self.uid = uid
254 251 self.invalidation_namespace = invalidation_namespace
255 252 self.raise_exception = raise_exception
256 253 self.proc_id = safe_unicode(rhodecode.CONFIG.get('instance_id') or 'DEFAULT')
257 254 self.thread_id = 'global'
258 255
259 256 if thread_scoped is None:
260 257 # if we set "default" we can override this via .ini settings
261 258 thread_scoped = str2bool(rhodecode.CONFIG.get('cache_thread_scoped'))
262 259
263 260 # Append the thread id to the cache key if this invalidation context
264 261 # should be scoped to the current thread.
265 262 if thread_scoped is True:
266 263 self.thread_id = threading.current_thread().ident
267 264
268 265 self.cache_key = compute_key_from_params(uid)
269 266 self.cache_key = 'proc:{}_thread:{}_{}'.format(
270 267 self.proc_id, self.thread_id, self.cache_key)
268 self.compute_time = 0
271 269
272 270 def get_or_create_cache_obj(self, uid, invalidation_namespace=''):
273 271 log.debug('Checking if %s cache key is present and active', self.cache_key)
274 272 cache_obj = CacheKey.get_active_cache(self.cache_key)
275 273 invalidation_namespace = invalidation_namespace or self.invalidation_namespace
276 274 if not cache_obj:
277 275 cache_obj = CacheKey(self.cache_key, cache_args=invalidation_namespace)
278 276 return cache_obj
279 277
280 278 def __enter__(self):
281 279 """
282 280 Test if current object is valid, and return CacheRegion function
283 281 that does invalidation and calculation
284 282 """
285 283 # register or get a new key based on uid
286 284 self.cache_obj = self.get_or_create_cache_obj(uid=self.uid)
287
285 self._start_time = time.time()
288 286 if self.cache_obj.cache_active:
289 287 # means our cache obj is existing and marked as it's
290 288 # cache is not outdated, we return ActiveRegionCache
291 289 self.skip_cache_active_change = True
290
292 291 return ActiveRegionCache(context=self)
293 292
294 # the key is either not existing or set to False, we return
293 # the key is either not existing or set to False, we return
295 294 # the real invalidator which re-computes value. We additionally set
296 295 # the flag to actually update the Database objects
297 296 self.skip_cache_active_change = False
298 297 return FreshRegionCache(context=self)
299 298
300 299 def __exit__(self, exc_type, exc_val, exc_tb):
300 # save compute time
301 self.compute_time = time.time() - self._start_time
301 302
302 303 if self.skip_cache_active_change:
303 304 return
304 305
305 306 try:
306 307 self.cache_obj.cache_active = True
307 308 Session().add(self.cache_obj)
308 309 Session().commit()
309 310 except IntegrityError:
310 311 # if we catch integrity error, it means we inserted this object
311 312 # assumption is that's really an edge race-condition case and
312 313 # it's safe is to skip it
313 314 Session().rollback()
314 315 except Exception:
315 316 log.exception('Failed to commit on cache key update')
316 317 Session().rollback()
317 318 if self.raise_exception:
318 319 raise
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now