Show More
@@ -1,243 +1,254 b'' | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 | # Model for RhodeCode |
|
3 | # Model for RhodeCode | |
4 | # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com> |
|
4 | # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com> | |
5 | # |
|
5 | # | |
6 | # This program is free software; you can redistribute it and/or |
|
6 | # This program is free software; you can redistribute it and/or | |
7 | # modify it under the terms of the GNU General Public License |
|
7 | # modify it under the terms of the GNU General Public License | |
8 | # as published by the Free Software Foundation; version 2 |
|
8 | # as published by the Free Software Foundation; version 2 | |
9 | # of the License or (at your opinion) any later version of the license. |
|
9 | # of the License or (at your opinion) any later version of the license. | |
10 | # |
|
10 | # | |
11 | # This program is distributed in the hope that it will be useful, |
|
11 | # This program is distributed in the hope that it will be useful, | |
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | # GNU General Public License for more details. |
|
14 | # GNU General Public License for more details. | |
15 | # |
|
15 | # | |
16 | # You should have received a copy of the GNU General Public License |
|
16 | # You should have received a copy of the GNU General Public License | |
17 | # along with this program; if not, write to the Free Software |
|
17 | # along with this program; if not, write to the Free Software | |
18 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, |
|
18 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, | |
19 | # MA 02110-1301, USA. |
|
19 | # MA 02110-1301, USA. | |
20 | """ |
|
20 | """ | |
21 | Created on April 9, 2010 |
|
21 | Created on April 9, 2010 | |
22 | Model for RhodeCode |
|
22 | Model for RhodeCode | |
23 | @author: marcink |
|
23 | @author: marcink | |
24 | """ |
|
24 | """ | |
25 | from beaker.cache import cache_region, region_invalidate |
|
25 | from beaker.cache import cache_region, region_invalidate | |
26 | from mercurial import ui |
|
26 | from mercurial import ui | |
27 | from rhodecode import BACKENDS |
|
27 | from rhodecode import BACKENDS | |
28 | from rhodecode.lib import helpers as h |
|
28 | from rhodecode.lib import helpers as h | |
29 | from rhodecode.lib.auth import HasRepoPermissionAny |
|
29 | from rhodecode.lib.auth import HasRepoPermissionAny | |
30 | from rhodecode.lib.utils import get_repos, make_ui |
|
30 | from rhodecode.lib.utils import get_repos, make_ui | |
31 | from rhodecode.model import meta |
|
31 | from rhodecode.model import meta | |
32 | from rhodecode.model.db import Repository, User, RhodeCodeUi, CacheInvalidation |
|
32 | from rhodecode.model.db import Repository, User, RhodeCodeUi, CacheInvalidation | |
33 | from rhodecode.model.caching_query import FromCache |
|
33 | from rhodecode.model.caching_query import FromCache | |
34 | from sqlalchemy.orm import joinedload |
|
34 | from sqlalchemy.orm import joinedload | |
35 | from sqlalchemy.orm.session import make_transient |
|
35 | from sqlalchemy.orm.session import make_transient | |
36 | from vcs import get_backend |
|
36 | from vcs import get_backend | |
37 | from vcs.utils.helpers import get_scm |
|
37 | from vcs.utils.helpers import get_scm | |
38 | from vcs.exceptions import RepositoryError, VCSError |
|
38 | from vcs.exceptions import RepositoryError, VCSError | |
39 | from vcs.utils.lazy import LazyProperty |
|
39 | from vcs.utils.lazy import LazyProperty | |
40 | import traceback |
|
40 | import traceback | |
41 | import logging |
|
41 | import logging | |
42 | import os |
|
42 | import os | |
43 | import time |
|
43 | import time | |
44 |
|
44 | |||
45 | log = logging.getLogger(__name__) |
|
45 | log = logging.getLogger(__name__) | |
46 |
|
46 | |||
class ScmModel(object):
    """Generic SCM model.

    Provides repository scanning, backend instantiation and cache
    invalidation helpers on top of the RhodeCode database session.
    """

    def __init__(self):
        # Shared SQLAlchemy session used by every query helper below.
        self.sa = meta.Session()
54 |
|
54 | |||
55 | @LazyProperty |
|
55 | @LazyProperty | |
56 | def repos_path(self): |
|
56 | def repos_path(self): | |
57 | """ |
|
57 | """ | |
58 | Get's the repositories root path from database |
|
58 | Get's the repositories root path from database | |
59 | """ |
|
59 | """ | |
60 | q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one() |
|
60 | q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one() | |
61 |
|
61 | |||
62 | return q.ui_value |
|
62 | return q.ui_value | |
63 |
|
63 | |||
64 | def repo_scan(self, repos_path, baseui, initial=False): |
|
64 | def repo_scan(self, repos_path, baseui, initial=False): | |
65 | """ |
|
65 | """ | |
66 | Listing of repositories in given path. This path should not be a |
|
66 | Listing of repositories in given path. This path should not be a | |
67 | repository itself. Return a dictionary of repository objects |
|
67 | repository itself. Return a dictionary of repository objects | |
68 |
|
68 | |||
69 | :param repos_path: path to directory containing repositories |
|
69 | :param repos_path: path to directory containing repositories | |
70 | :param baseui |
|
70 | :param baseui | |
71 | :param initial: initial scan |
|
71 | :param initial: initial scan | |
72 | """ |
|
72 | """ | |
73 | log.info('scanning for repositories in %s', repos_path) |
|
73 | log.info('scanning for repositories in %s', repos_path) | |
74 |
|
74 | |||
75 | if not isinstance(baseui, ui.ui): |
|
75 | if not isinstance(baseui, ui.ui): | |
76 | baseui = make_ui('db') |
|
76 | baseui = make_ui('db') | |
77 | repos_list = {} |
|
77 | repos_list = {} | |
78 |
|
78 | |||
79 | for name, path in get_repos(repos_path): |
|
79 | for name, path in get_repos(repos_path): | |
80 | try: |
|
80 | try: | |
81 | if repos_list.has_key(name): |
|
81 | if repos_list.has_key(name): | |
82 | raise RepositoryError('Duplicate repository name %s ' |
|
82 | raise RepositoryError('Duplicate repository name %s ' | |
83 | 'found in %s' % (name, path)) |
|
83 | 'found in %s' % (name, path)) | |
84 | else: |
|
84 | else: | |
85 |
|
85 | |||
86 | klass = get_backend(path[0]) |
|
86 | klass = get_backend(path[0]) | |
87 |
|
87 | |||
88 | if path[0] == 'hg' and path[0] in BACKENDS.keys(): |
|
88 | if path[0] == 'hg' and path[0] in BACKENDS.keys(): | |
89 | repos_list[name] = klass(path[1], baseui=baseui) |
|
89 | repos_list[name] = klass(path[1], baseui=baseui) | |
90 |
|
90 | |||
91 | if path[0] == 'git' and path[0] in BACKENDS.keys(): |
|
91 | if path[0] == 'git' and path[0] in BACKENDS.keys(): | |
92 | repos_list[name] = klass(path[1]) |
|
92 | repos_list[name] = klass(path[1]) | |
93 | except OSError: |
|
93 | except OSError: | |
94 | continue |
|
94 | continue | |
95 |
|
95 | |||
96 | return repos_list |
|
96 | return repos_list | |
97 |
|
97 | |||
98 | def get_repos(self, all_repos=None): |
|
98 | def get_repos(self, all_repos=None): | |
99 | """ |
|
99 | """ | |
100 | Get all repos from db and for each repo create it's backend instance. |
|
100 | Get all repos from db and for each repo create it's backend instance. | |
101 | and fill that backed with information from database |
|
101 | and fill that backed with information from database | |
102 |
|
102 | |||
103 | :param all_repos: give specific repositories list, good for filtering |
|
103 | :param all_repos: give specific repositories list, good for filtering | |
104 | """ |
|
104 | """ | |
105 | if not all_repos: |
|
105 | if not all_repos: | |
106 | all_repos = self.sa.query(Repository)\ |
|
106 | all_repos = self.sa.query(Repository)\ | |
107 | .order_by(Repository.repo_name).all() |
|
107 | .order_by(Repository.repo_name).all() | |
108 |
|
108 | |||
|
109 | invalidation_list = [str(x.cache_key) for x in \ | |||
|
110 | self.sa.query(CacheInvalidation.cache_key)\ | |||
|
111 | .filter(CacheInvalidation.cache_active == False)\ | |||
|
112 | .all()] | |||
|
113 | ||||
109 | for r in all_repos: |
|
114 | for r in all_repos: | |
110 |
|
115 | |||
111 | repo = self.get(r.repo_name) |
|
116 | repo = self.get(r.repo_name, invalidation_list) | |
112 |
|
117 | |||
113 | if repo is not None: |
|
118 | if repo is not None: | |
114 | last_change = repo.last_change |
|
119 | last_change = repo.last_change | |
115 | tip = h.get_changeset_safe(repo, 'tip') |
|
120 | tip = h.get_changeset_safe(repo, 'tip') | |
116 |
|
121 | |||
117 | tmp_d = {} |
|
122 | tmp_d = {} | |
118 | tmp_d['name'] = repo.name |
|
123 | tmp_d['name'] = repo.name | |
119 | tmp_d['name_sort'] = tmp_d['name'].lower() |
|
124 | tmp_d['name_sort'] = tmp_d['name'].lower() | |
120 | tmp_d['description'] = repo.dbrepo.description |
|
125 | tmp_d['description'] = repo.dbrepo.description | |
121 | tmp_d['description_sort'] = tmp_d['description'] |
|
126 | tmp_d['description_sort'] = tmp_d['description'] | |
122 | tmp_d['last_change'] = last_change |
|
127 | tmp_d['last_change'] = last_change | |
123 | tmp_d['last_change_sort'] = time.mktime(last_change.timetuple()) |
|
128 | tmp_d['last_change_sort'] = time.mktime(last_change.timetuple()) | |
124 | tmp_d['tip'] = tip.raw_id |
|
129 | tmp_d['tip'] = tip.raw_id | |
125 | tmp_d['tip_sort'] = tip.revision |
|
130 | tmp_d['tip_sort'] = tip.revision | |
126 | tmp_d['rev'] = tip.revision |
|
131 | tmp_d['rev'] = tip.revision | |
127 | tmp_d['contact'] = repo.dbrepo.user.full_contact |
|
132 | tmp_d['contact'] = repo.dbrepo.user.full_contact | |
128 | tmp_d['contact_sort'] = tmp_d['contact'] |
|
133 | tmp_d['contact_sort'] = tmp_d['contact'] | |
129 | tmp_d['repo_archives'] = list(repo._get_archives()) |
|
134 | tmp_d['repo_archives'] = list(repo._get_archives()) | |
130 | tmp_d['last_msg'] = tip.message |
|
135 | tmp_d['last_msg'] = tip.message | |
131 | tmp_d['repo'] = repo |
|
136 | tmp_d['repo'] = repo | |
132 | yield tmp_d |
|
137 | yield tmp_d | |
133 |
|
138 | |||
134 | def get_repo(self, repo_name): |
|
139 | def get_repo(self, repo_name): | |
135 | return self.get(repo_name) |
|
140 | return self.get(repo_name) | |
136 |
|
141 | |||
137 | def get(self, repo_name): |
|
142 | def get(self, repo_name, invalidation_list=None): | |
138 | """ |
|
143 | """ | |
139 | Get's repository from given name, creates BackendInstance and |
|
144 | Get's repository from given name, creates BackendInstance and | |
140 | propagates it's data from database with all additional information |
|
145 | propagates it's data from database with all additional information | |
141 | :param repo_name: |
|
146 | :param repo_name: | |
142 | """ |
|
147 | """ | |
143 | if not HasRepoPermissionAny('repository.read', 'repository.write', |
|
148 | if not HasRepoPermissionAny('repository.read', 'repository.write', | |
144 | 'repository.admin')(repo_name, 'get repo check'): |
|
149 | 'repository.admin')(repo_name, 'get repo check'): | |
145 | return |
|
150 | return | |
146 |
|
151 | |||
147 | @cache_region('long_term') |
|
152 | @cache_region('long_term') | |
148 | def _get_repo(repo_name): |
|
153 | def _get_repo(repo_name): | |
149 |
|
154 | |||
150 | repo_path = os.path.join(self.repos_path, repo_name) |
|
155 | repo_path = os.path.join(self.repos_path, repo_name) | |
151 | alias = get_scm(repo_path)[0] |
|
156 | alias = get_scm(repo_path)[0] | |
152 |
|
157 | |||
153 | log.debug('Creating instance of %s repository', alias) |
|
158 | log.debug('Creating instance of %s repository', alias) | |
154 | backend = get_backend(alias) |
|
159 | backend = get_backend(alias) | |
155 |
|
160 | |||
156 | #TODO: get the baseui from somewhere for this |
|
161 | #TODO: get the baseui from somewhere for this | |
157 | if alias == 'hg': |
|
162 | if alias == 'hg': | |
158 | from pylons import app_globals as g |
|
163 | from pylons import app_globals as g | |
159 | repo = backend(repo_path, create=False, baseui=g.baseui) |
|
164 | repo = backend(repo_path, create=False, baseui=g.baseui) | |
160 | #skip hidden web repository |
|
165 | #skip hidden web repository | |
161 | if repo._get_hidden(): |
|
166 | if repo._get_hidden(): | |
162 | return |
|
167 | return | |
163 | else: |
|
168 | else: | |
164 | repo = backend(repo_path, create=False) |
|
169 | repo = backend(repo_path, create=False) | |
165 |
|
170 | |||
166 | dbrepo = self.sa.query(Repository)\ |
|
171 | dbrepo = self.sa.query(Repository)\ | |
167 | .options(joinedload(Repository.fork))\ |
|
172 | .options(joinedload(Repository.fork))\ | |
168 | .options(joinedload(Repository.user))\ |
|
173 | .options(joinedload(Repository.user))\ | |
169 | .filter(Repository.repo_name == repo_name)\ |
|
174 | .filter(Repository.repo_name == repo_name)\ | |
170 | .scalar() |
|
175 | .scalar() | |
171 | make_transient(dbrepo) |
|
176 | make_transient(dbrepo) | |
172 | repo.dbrepo = dbrepo |
|
177 | repo.dbrepo = dbrepo | |
173 | return repo |
|
178 | return repo | |
174 |
|
179 | |||
|
180 | pre_invalidate = True | |||
|
181 | if invalidation_list: | |||
|
182 | pre_invalidate = repo_name in invalidation_list | |||
|
183 | ||||
|
184 | if pre_invalidate: | |||
175 | invalidate = self._should_invalidate(repo_name) |
|
185 | invalidate = self._should_invalidate(repo_name) | |
|
186 | ||||
176 | if invalidate: |
|
187 | if invalidate: | |
177 | log.info('invalidating cache for repository %s', repo_name) |
|
188 | log.info('invalidating cache for repository %s', repo_name) | |
178 | region_invalidate(_get_repo, None, repo_name) |
|
189 | region_invalidate(_get_repo, None, repo_name) | |
179 | self._mark_invalidated(invalidate) |
|
190 | self._mark_invalidated(invalidate) | |
180 |
|
191 | |||
181 | return _get_repo(repo_name) |
|
192 | return _get_repo(repo_name) | |
182 |
|
193 | |||
183 |
|
194 | |||
184 |
|
195 | |||
185 | def mark_for_invalidation(self, repo_name): |
|
196 | def mark_for_invalidation(self, repo_name): | |
186 | """ |
|
197 | """ | |
187 | Puts cache invalidation task into db for |
|
198 | Puts cache invalidation task into db for | |
188 | further global cache invalidation |
|
199 | further global cache invalidation | |
189 |
|
200 | |||
190 | :param repo_name: this repo that should invalidation take place |
|
201 | :param repo_name: this repo that should invalidation take place | |
191 | """ |
|
202 | """ | |
192 | log.debug('marking %s for invalidation', repo_name) |
|
203 | log.debug('marking %s for invalidation', repo_name) | |
193 | cache = self.sa.query(CacheInvalidation)\ |
|
204 | cache = self.sa.query(CacheInvalidation)\ | |
194 | .filter(CacheInvalidation.cache_key == repo_name).scalar() |
|
205 | .filter(CacheInvalidation.cache_key == repo_name).scalar() | |
195 |
|
206 | |||
196 | if cache: |
|
207 | if cache: | |
197 | #mark this cache as inactive |
|
208 | #mark this cache as inactive | |
198 | cache.cache_active = False |
|
209 | cache.cache_active = False | |
199 | else: |
|
210 | else: | |
200 | log.debug('cache key not found in invalidation db -> creating one') |
|
211 | log.debug('cache key not found in invalidation db -> creating one') | |
201 | cache = CacheInvalidation(repo_name) |
|
212 | cache = CacheInvalidation(repo_name) | |
202 |
|
213 | |||
203 | try: |
|
214 | try: | |
204 | self.sa.add(cache) |
|
215 | self.sa.add(cache) | |
205 | self.sa.commit() |
|
216 | self.sa.commit() | |
206 | except: |
|
217 | except: | |
207 | log.error(traceback.format_exc()) |
|
218 | log.error(traceback.format_exc()) | |
208 | self.sa.rollback() |
|
219 | self.sa.rollback() | |
209 |
|
220 | |||
210 |
|
221 | |||
211 |
|
222 | |||
212 |
|
223 | |||
213 |
|
224 | |||
214 | def _should_invalidate(self, repo_name): |
|
225 | def _should_invalidate(self, repo_name): | |
215 | """ |
|
226 | """ | |
216 | Looks up database for invalidation signals for this repo_name |
|
227 | Looks up database for invalidation signals for this repo_name | |
217 | :param repo_name: |
|
228 | :param repo_name: | |
218 | """ |
|
229 | """ | |
219 |
|
230 | |||
220 | ret = self.sa.query(CacheInvalidation)\ |
|
231 | ret = self.sa.query(CacheInvalidation)\ | |
221 | .options(FromCache('sql_cache_short', |
|
232 | .options(FromCache('sql_cache_short', | |
222 | 'get_invalidation_%s' % repo_name))\ |
|
233 | 'get_invalidation_%s' % repo_name))\ | |
223 | .filter(CacheInvalidation.cache_key == repo_name)\ |
|
234 | .filter(CacheInvalidation.cache_key == repo_name)\ | |
224 | .filter(CacheInvalidation.cache_active == False)\ |
|
235 | .filter(CacheInvalidation.cache_active == False)\ | |
225 | .scalar() |
|
236 | .scalar() | |
226 |
|
237 | |||
227 | return ret |
|
238 | return ret | |
228 |
|
239 | |||
229 | def _mark_invalidated(self, cache_key): |
|
240 | def _mark_invalidated(self, cache_key): | |
230 | """ |
|
241 | """ | |
231 | Marks all occurences of cache to invaldation as already invalidated |
|
242 | Marks all occurences of cache to invaldation as already invalidated | |
232 | @param repo_name: |
|
243 | @param repo_name: | |
233 | """ |
|
244 | """ | |
234 | if cache_key: |
|
245 | if cache_key: | |
235 | log.debug('marking %s as already invalidated', cache_key) |
|
246 | log.debug('marking %s as already invalidated', cache_key) | |
236 | try: |
|
247 | try: | |
237 | cache_key.cache_active = True |
|
248 | cache_key.cache_active = True | |
238 | self.sa.add(cache_key) |
|
249 | self.sa.add(cache_key) | |
239 | self.sa.commit() |
|
250 | self.sa.commit() | |
240 | except: |
|
251 | except: | |
241 | log.error(traceback.format_exc()) |
|
252 | log.error(traceback.format_exc()) | |
242 | self.sa.rollback() |
|
253 | self.sa.rollback() | |
243 |
|
254 |
General Comments 0
You need to be logged in to leave comments.
Login now