##// END OF EJS Templates
scm: clear the cs cache on delete/remap
dan -
r337:e35f8ec4 default
parent child Browse files
Show More
@@ -1,1105 +1,1101 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import re
26 import re
27 import sys
27 import sys
28 import time
28 import time
29 import traceback
29 import traceback
30 import logging
30 import logging
31 import cStringIO
31 import cStringIO
32 import pkg_resources
32 import pkg_resources
33
33
34 import pylons
34 import pylons
35 from pylons.i18n.translation import _
35 from pylons.i18n.translation import _
36 from sqlalchemy import func
36 from sqlalchemy import func
37 from zope.cachedescriptors.property import Lazy as LazyProperty
37 from zope.cachedescriptors.property import Lazy as LazyProperty
38
38
39 import rhodecode
39 import rhodecode
40 from rhodecode.lib.vcs import get_backend
40 from rhodecode.lib.vcs import get_backend
41 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
41 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
42 from rhodecode.lib.vcs.nodes import FileNode
42 from rhodecode.lib.vcs.nodes import FileNode
43 from rhodecode.lib.vcs.backends.base import EmptyCommit
43 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 from rhodecode.lib import helpers as h
44 from rhodecode.lib import helpers as h
45
45
46 from rhodecode.lib.auth import (
46 from rhodecode.lib.auth import (
47 HasRepoPermissionAny, HasRepoGroupPermissionAny,
47 HasRepoPermissionAny, HasRepoGroupPermissionAny,
48 HasUserGroupPermissionAny)
48 HasUserGroupPermissionAny)
49 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
49 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
50 from rhodecode.lib import hooks_utils, caches
50 from rhodecode.lib import hooks_utils, caches
51 from rhodecode.lib.utils import (
51 from rhodecode.lib.utils import (
52 get_filesystem_repos, action_logger, make_db_config)
52 get_filesystem_repos, action_logger, make_db_config)
53 from rhodecode.lib.utils2 import (
53 from rhodecode.lib.utils2 import (
54 safe_str, safe_unicode, get_server_url, md5)
54 safe_str, safe_unicode, get_server_url, md5)
55 from rhodecode.model import BaseModel
55 from rhodecode.model import BaseModel
56 from rhodecode.model.db import (
56 from rhodecode.model.db import (
57 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
57 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
58 PullRequest, DbMigrateVersion)
58 PullRequest, DbMigrateVersion)
59 from rhodecode.model.settings import VcsSettingsModel
59 from rhodecode.model.settings import VcsSettingsModel
60
60
61 log = logging.getLogger(__name__)
61 log = logging.getLogger(__name__)
62
62
63
63
class UserTemp(object):
    """Lightweight stand-in for a User row that carries only the id.

    Handy for action logging where a full User object is not needed.
    """

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{0}('id:{1}')>".format(self.__class__.__name__, self.user_id)
70
70
71
71
class RepoTemp(object):
    """Lightweight stand-in for a Repository row that carries only the id.

    Handy for action logging where a full Repository object is not needed.
    """

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{0}('id:{1}')>".format(self.__class__.__name__, self.repo_id)
78
78
79
79
class SimpleCachedRepoList(object):
    """
    Lighter version of iteration over repositories that skips the scm
    initialisation and relies on cached database state. Iterating yields
    one small dict per repository the current context may read.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        # permission check happens here, once per repository
        allowed = HasRepoPermissionAny(*self.perm_set)
        for db_repo in self.db_repo_list:
            if not allowed(db_repo.repo_name, 'SimpleCachedRepoList check'):
                continue

            yield {
                'name': db_repo.repo_name,
                'dbrepo': db_repo.get_dict(),
                'dbrepo_fork': db_repo.fork.get_dict() if db_repo.fork else {}
            }
115
115
116
116
117 class _PermCheckIterator(object):
117 class _PermCheckIterator(object):
118
118
119 def __init__(
119 def __init__(
120 self, obj_list, obj_attr, perm_set, perm_checker,
120 self, obj_list, obj_attr, perm_set, perm_checker,
121 extra_kwargs=None):
121 extra_kwargs=None):
122 """
122 """
123 Creates iterator from given list of objects, additionally
123 Creates iterator from given list of objects, additionally
124 checking permission for them from perm_set var
124 checking permission for them from perm_set var
125
125
126 :param obj_list: list of db objects
126 :param obj_list: list of db objects
127 :param obj_attr: attribute of object to pass into perm_checker
127 :param obj_attr: attribute of object to pass into perm_checker
128 :param perm_set: list of permissions to check
128 :param perm_set: list of permissions to check
129 :param perm_checker: callable to check permissions against
129 :param perm_checker: callable to check permissions against
130 """
130 """
131 self.obj_list = obj_list
131 self.obj_list = obj_list
132 self.obj_attr = obj_attr
132 self.obj_attr = obj_attr
133 self.perm_set = perm_set
133 self.perm_set = perm_set
134 self.perm_checker = perm_checker
134 self.perm_checker = perm_checker
135 self.extra_kwargs = extra_kwargs or {}
135 self.extra_kwargs = extra_kwargs or {}
136
136
137 def __len__(self):
137 def __len__(self):
138 return len(self.obj_list)
138 return len(self.obj_list)
139
139
140 def __repr__(self):
140 def __repr__(self):
141 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
141 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
142
142
143 def __iter__(self):
143 def __iter__(self):
144 checker = self.perm_checker(*self.perm_set)
144 checker = self.perm_checker(*self.perm_set)
145 for db_obj in self.obj_list:
145 for db_obj in self.obj_list:
146 # check permission at this level
146 # check permission at this level
147 name = getattr(db_obj, self.obj_attr, None)
147 name = getattr(db_obj, self.obj_attr, None)
148 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
148 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
149 continue
149 continue
150
150
151 yield db_obj
151 yield db_obj
152
152
153
153
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over Repository db objects."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name',
            perm_set=perm_set or [
                'repository.read', 'repository.write', 'repository.admin'],
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
166
166
167
167
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over RepoGroup db objects."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name',
            perm_set=perm_set or [
                'group.read', 'group.write', 'group.admin'],
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
179
179
180
180
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over UserGroup db objects."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name',
            perm_set=perm_set or [
                'usergroup.read', 'usergroup.write', 'usergroup.admin'],
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
192
192
193
193
194 class ScmModel(BaseModel):
194 class ScmModel(BaseModel):
195 """
195 """
196 Generic Scm Model
196 Generic Scm Model
197 """
197 """
198
198
199 @LazyProperty
199 @LazyProperty
200 def repos_path(self):
200 def repos_path(self):
201 """
201 """
202 Gets the repositories root path from database
202 Gets the repositories root path from database
203 """
203 """
204
204
205 settings_model = VcsSettingsModel(sa=self.sa)
205 settings_model = VcsSettingsModel(sa=self.sa)
206 return settings_model.get_repos_location()
206 return settings_model.get_repos_location()
207
207
208 def repo_scan(self, repos_path=None):
208 def repo_scan(self, repos_path=None):
209 """
209 """
210 Listing of repositories in given path. This path should not be a
210 Listing of repositories in given path. This path should not be a
211 repository itself. Return a dictionary of repository objects
211 repository itself. Return a dictionary of repository objects
212
212
213 :param repos_path: path to directory containing repositories
213 :param repos_path: path to directory containing repositories
214 """
214 """
215
215
216 if repos_path is None:
216 if repos_path is None:
217 repos_path = self.repos_path
217 repos_path = self.repos_path
218
218
219 log.info('scanning for repositories in %s', repos_path)
219 log.info('scanning for repositories in %s', repos_path)
220
220
221 config = make_db_config()
221 config = make_db_config()
222 config.set('extensions', 'largefiles', '')
222 config.set('extensions', 'largefiles', '')
223 repos = {}
223 repos = {}
224
224
225 for name, path in get_filesystem_repos(repos_path, recursive=True):
225 for name, path in get_filesystem_repos(repos_path, recursive=True):
226 # name need to be decomposed and put back together using the /
226 # name need to be decomposed and put back together using the /
227 # since this is internal storage separator for rhodecode
227 # since this is internal storage separator for rhodecode
228 name = Repository.normalize_repo_name(name)
228 name = Repository.normalize_repo_name(name)
229
229
230 try:
230 try:
231 if name in repos:
231 if name in repos:
232 raise RepositoryError('Duplicate repository name %s '
232 raise RepositoryError('Duplicate repository name %s '
233 'found in %s' % (name, path))
233 'found in %s' % (name, path))
234 elif path[0] in rhodecode.BACKENDS:
234 elif path[0] in rhodecode.BACKENDS:
235 klass = get_backend(path[0])
235 klass = get_backend(path[0])
236 repos[name] = klass(path[1], config=config)
236 repos[name] = klass(path[1], config=config)
237 except OSError:
237 except OSError:
238 continue
238 continue
239 log.debug('found %s paths with repositories', len(repos))
239 log.debug('found %s paths with repositories', len(repos))
240 return repos
240 return repos
241
241
242 def get_repos(self, all_repos=None, sort_key=None):
242 def get_repos(self, all_repos=None, sort_key=None):
243 """
243 """
244 Get all repositories from db and for each repo create it's
244 Get all repositories from db and for each repo create it's
245 backend instance and fill that backed with information from database
245 backend instance and fill that backed with information from database
246
246
247 :param all_repos: list of repository names as strings
247 :param all_repos: list of repository names as strings
248 give specific repositories list, good for filtering
248 give specific repositories list, good for filtering
249
249
250 :param sort_key: initial sorting of repositories
250 :param sort_key: initial sorting of repositories
251 """
251 """
252 if all_repos is None:
252 if all_repos is None:
253 all_repos = self.sa.query(Repository)\
253 all_repos = self.sa.query(Repository)\
254 .filter(Repository.group_id == None)\
254 .filter(Repository.group_id == None)\
255 .order_by(func.lower(Repository.repo_name)).all()
255 .order_by(func.lower(Repository.repo_name)).all()
256 repo_iter = SimpleCachedRepoList(
256 repo_iter = SimpleCachedRepoList(
257 all_repos, repos_path=self.repos_path, order_by=sort_key)
257 all_repos, repos_path=self.repos_path, order_by=sort_key)
258 return repo_iter
258 return repo_iter
259
259
260 def get_repo_groups(self, all_groups=None):
260 def get_repo_groups(self, all_groups=None):
261 if all_groups is None:
261 if all_groups is None:
262 all_groups = RepoGroup.query()\
262 all_groups = RepoGroup.query()\
263 .filter(RepoGroup.group_parent_id == None).all()
263 .filter(RepoGroup.group_parent_id == None).all()
264 return [x for x in RepoGroupList(all_groups)]
264 return [x for x in RepoGroupList(all_groups)]
265
265
266 def mark_for_invalidation(self, repo_name, delete=False):
266 def mark_for_invalidation(self, repo_name, delete=False):
267 """
267 """
268 Mark caches of this repo invalid in the database. `delete` flag
268 Mark caches of this repo invalid in the database. `delete` flag
269 removes the cache entries
269 removes the cache entries
270
270
271 :param repo_name: the repo_name for which caches should be marked
271 :param repo_name: the repo_name for which caches should be marked
272 invalid, or deleted
272 invalid, or deleted
273 :param delete: delete the entry keys instead of setting bool
273 :param delete: delete the entry keys instead of setting bool
274 flag on them
274 flag on them
275 """
275 """
276 CacheKey.set_invalidate(repo_name, delete=delete)
276 CacheKey.set_invalidate(repo_name, delete=delete)
277 repo = Repository.get_by_repo_name(repo_name)
277 repo = Repository.get_by_repo_name(repo_name)
278
278
279 if repo:
279 if repo:
280 config = repo._config
280 config = repo._config
281 config.set('extensions', 'largefiles', '')
281 config.set('extensions', 'largefiles', '')
282 cs_cache = None
282 repo.update_commit_cache(config=config, cs_cache=None)
283 if delete:
284 # if we do a hard clear, reset last-commit to Empty
285 cs_cache = EmptyCommit()
286 repo.update_commit_cache(config=config, cs_cache=cs_cache)
287 caches.clear_repo_caches(repo_name)
283 caches.clear_repo_caches(repo_name)
288
284
289 def toggle_following_repo(self, follow_repo_id, user_id):
285 def toggle_following_repo(self, follow_repo_id, user_id):
290
286
291 f = self.sa.query(UserFollowing)\
287 f = self.sa.query(UserFollowing)\
292 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
293 .filter(UserFollowing.user_id == user_id).scalar()
289 .filter(UserFollowing.user_id == user_id).scalar()
294
290
295 if f is not None:
291 if f is not None:
296 try:
292 try:
297 self.sa.delete(f)
293 self.sa.delete(f)
298 action_logger(UserTemp(user_id),
294 action_logger(UserTemp(user_id),
299 'stopped_following_repo',
295 'stopped_following_repo',
300 RepoTemp(follow_repo_id))
296 RepoTemp(follow_repo_id))
301 return
297 return
302 except Exception:
298 except Exception:
303 log.error(traceback.format_exc())
299 log.error(traceback.format_exc())
304 raise
300 raise
305
301
306 try:
302 try:
307 f = UserFollowing()
303 f = UserFollowing()
308 f.user_id = user_id
304 f.user_id = user_id
309 f.follows_repo_id = follow_repo_id
305 f.follows_repo_id = follow_repo_id
310 self.sa.add(f)
306 self.sa.add(f)
311
307
312 action_logger(UserTemp(user_id),
308 action_logger(UserTemp(user_id),
313 'started_following_repo',
309 'started_following_repo',
314 RepoTemp(follow_repo_id))
310 RepoTemp(follow_repo_id))
315 except Exception:
311 except Exception:
316 log.error(traceback.format_exc())
312 log.error(traceback.format_exc())
317 raise
313 raise
318
314
319 def toggle_following_user(self, follow_user_id, user_id):
315 def toggle_following_user(self, follow_user_id, user_id):
320 f = self.sa.query(UserFollowing)\
316 f = self.sa.query(UserFollowing)\
321 .filter(UserFollowing.follows_user_id == follow_user_id)\
317 .filter(UserFollowing.follows_user_id == follow_user_id)\
322 .filter(UserFollowing.user_id == user_id).scalar()
318 .filter(UserFollowing.user_id == user_id).scalar()
323
319
324 if f is not None:
320 if f is not None:
325 try:
321 try:
326 self.sa.delete(f)
322 self.sa.delete(f)
327 return
323 return
328 except Exception:
324 except Exception:
329 log.error(traceback.format_exc())
325 log.error(traceback.format_exc())
330 raise
326 raise
331
327
332 try:
328 try:
333 f = UserFollowing()
329 f = UserFollowing()
334 f.user_id = user_id
330 f.user_id = user_id
335 f.follows_user_id = follow_user_id
331 f.follows_user_id = follow_user_id
336 self.sa.add(f)
332 self.sa.add(f)
337 except Exception:
333 except Exception:
338 log.error(traceback.format_exc())
334 log.error(traceback.format_exc())
339 raise
335 raise
340
336
341 def is_following_repo(self, repo_name, user_id, cache=False):
337 def is_following_repo(self, repo_name, user_id, cache=False):
342 r = self.sa.query(Repository)\
338 r = self.sa.query(Repository)\
343 .filter(Repository.repo_name == repo_name).scalar()
339 .filter(Repository.repo_name == repo_name).scalar()
344
340
345 f = self.sa.query(UserFollowing)\
341 f = self.sa.query(UserFollowing)\
346 .filter(UserFollowing.follows_repository == r)\
342 .filter(UserFollowing.follows_repository == r)\
347 .filter(UserFollowing.user_id == user_id).scalar()
343 .filter(UserFollowing.user_id == user_id).scalar()
348
344
349 return f is not None
345 return f is not None
350
346
351 def is_following_user(self, username, user_id, cache=False):
347 def is_following_user(self, username, user_id, cache=False):
352 u = User.get_by_username(username)
348 u = User.get_by_username(username)
353
349
354 f = self.sa.query(UserFollowing)\
350 f = self.sa.query(UserFollowing)\
355 .filter(UserFollowing.follows_user == u)\
351 .filter(UserFollowing.follows_user == u)\
356 .filter(UserFollowing.user_id == user_id).scalar()
352 .filter(UserFollowing.user_id == user_id).scalar()
357
353
358 return f is not None
354 return f is not None
359
355
360 def get_followers(self, repo):
356 def get_followers(self, repo):
361 repo = self._get_repo(repo)
357 repo = self._get_repo(repo)
362
358
363 return self.sa.query(UserFollowing)\
359 return self.sa.query(UserFollowing)\
364 .filter(UserFollowing.follows_repository == repo).count()
360 .filter(UserFollowing.follows_repository == repo).count()
365
361
366 def get_forks(self, repo):
362 def get_forks(self, repo):
367 repo = self._get_repo(repo)
363 repo = self._get_repo(repo)
368 return self.sa.query(Repository)\
364 return self.sa.query(Repository)\
369 .filter(Repository.fork == repo).count()
365 .filter(Repository.fork == repo).count()
370
366
371 def get_pull_requests(self, repo):
367 def get_pull_requests(self, repo):
372 repo = self._get_repo(repo)
368 repo = self._get_repo(repo)
373 return self.sa.query(PullRequest)\
369 return self.sa.query(PullRequest)\
374 .filter(PullRequest.target_repo == repo)\
370 .filter(PullRequest.target_repo == repo)\
375 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
371 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
376
372
377 def mark_as_fork(self, repo, fork, user):
373 def mark_as_fork(self, repo, fork, user):
378 repo = self._get_repo(repo)
374 repo = self._get_repo(repo)
379 fork = self._get_repo(fork)
375 fork = self._get_repo(fork)
380 if fork and repo.repo_id == fork.repo_id:
376 if fork and repo.repo_id == fork.repo_id:
381 raise Exception("Cannot set repository as fork of itself")
377 raise Exception("Cannot set repository as fork of itself")
382
378
383 if fork and repo.repo_type != fork.repo_type:
379 if fork and repo.repo_type != fork.repo_type:
384 raise RepositoryError(
380 raise RepositoryError(
385 "Cannot set repository as fork of repository with other type")
381 "Cannot set repository as fork of repository with other type")
386
382
387 repo.fork = fork
383 repo.fork = fork
388 self.sa.add(repo)
384 self.sa.add(repo)
389 return repo
385 return repo
390
386
391 def pull_changes(self, repo, username):
387 def pull_changes(self, repo, username):
392 dbrepo = self._get_repo(repo)
388 dbrepo = self._get_repo(repo)
393 clone_uri = dbrepo.clone_uri
389 clone_uri = dbrepo.clone_uri
394 if not clone_uri:
390 if not clone_uri:
395 raise Exception("This repository doesn't have a clone uri")
391 raise Exception("This repository doesn't have a clone uri")
396
392
397 repo = dbrepo.scm_instance(cache=False)
393 repo = dbrepo.scm_instance(cache=False)
398 # TODO: marcink fix this an re-enable since we need common logic
394 # TODO: marcink fix this an re-enable since we need common logic
399 # for hg/git remove hooks so we don't trigger them on fetching
395 # for hg/git remove hooks so we don't trigger them on fetching
400 # commits from remote
396 # commits from remote
401 repo.config.clear_section('hooks')
397 repo.config.clear_section('hooks')
402
398
403 repo_name = dbrepo.repo_name
399 repo_name = dbrepo.repo_name
404 try:
400 try:
405 # TODO: we need to make sure those operations call proper hooks !
401 # TODO: we need to make sure those operations call proper hooks !
406 repo.pull(clone_uri)
402 repo.pull(clone_uri)
407
403
408 self.mark_for_invalidation(repo_name)
404 self.mark_for_invalidation(repo_name)
409 except Exception:
405 except Exception:
410 log.error(traceback.format_exc())
406 log.error(traceback.format_exc())
411 raise
407 raise
412
408
413 def commit_change(self, repo, repo_name, commit, user, author, message,
409 def commit_change(self, repo, repo_name, commit, user, author, message,
414 content, f_path):
410 content, f_path):
415 """
411 """
416 Commits changes
412 Commits changes
417
413
418 :param repo: SCM instance
414 :param repo: SCM instance
419
415
420 """
416 """
421 user = self._get_user(user)
417 user = self._get_user(user)
422
418
423 # decoding here will force that we have proper encoded values
419 # decoding here will force that we have proper encoded values
424 # in any other case this will throw exceptions and deny commit
420 # in any other case this will throw exceptions and deny commit
425 content = safe_str(content)
421 content = safe_str(content)
426 path = safe_str(f_path)
422 path = safe_str(f_path)
427 # message and author needs to be unicode
423 # message and author needs to be unicode
428 # proper backend should then translate that into required type
424 # proper backend should then translate that into required type
429 message = safe_unicode(message)
425 message = safe_unicode(message)
430 author = safe_unicode(author)
426 author = safe_unicode(author)
431 imc = repo.in_memory_commit
427 imc = repo.in_memory_commit
432 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
428 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
433 try:
429 try:
434 # TODO: handle pre-push action !
430 # TODO: handle pre-push action !
435 tip = imc.commit(
431 tip = imc.commit(
436 message=message, author=author, parents=[commit],
432 message=message, author=author, parents=[commit],
437 branch=commit.branch)
433 branch=commit.branch)
438 except Exception as e:
434 except Exception as e:
439 log.error(traceback.format_exc())
435 log.error(traceback.format_exc())
440 raise IMCCommitError(str(e))
436 raise IMCCommitError(str(e))
441 finally:
437 finally:
442 # always clear caches, if commit fails we want fresh object also
438 # always clear caches, if commit fails we want fresh object also
443 self.mark_for_invalidation(repo_name)
439 self.mark_for_invalidation(repo_name)
444
440
445 # We trigger the post-push action
441 # We trigger the post-push action
446 hooks_utils.trigger_post_push_hook(
442 hooks_utils.trigger_post_push_hook(
447 username=user.username, action='push_local', repo_name=repo_name,
443 username=user.username, action='push_local', repo_name=repo_name,
448 repo_alias=repo.alias, commit_ids=[tip.raw_id])
444 repo_alias=repo.alias, commit_ids=[tip.raw_id])
449 return tip
445 return tip
450
446
451 def _sanitize_path(self, f_path):
447 def _sanitize_path(self, f_path):
452 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
448 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
453 raise NonRelativePathError('%s is not an relative path' % f_path)
449 raise NonRelativePathError('%s is not an relative path' % f_path)
454 if f_path:
450 if f_path:
455 f_path = os.path.normpath(f_path)
451 f_path = os.path.normpath(f_path)
456 return f_path
452 return f_path
457
453
458 def get_dirnode_metadata(self, commit, dir_node):
454 def get_dirnode_metadata(self, commit, dir_node):
459 if not dir_node.is_dir():
455 if not dir_node.is_dir():
460 return []
456 return []
461
457
462 data = []
458 data = []
463 for node in dir_node:
459 for node in dir_node:
464 if not node.is_file():
460 if not node.is_file():
465 # we skip file-nodes
461 # we skip file-nodes
466 continue
462 continue
467
463
468 last_commit = node.last_commit
464 last_commit = node.last_commit
469 last_commit_date = last_commit.date
465 last_commit_date = last_commit.date
470 data.append({
466 data.append({
471 'name': node.name,
467 'name': node.name,
472 'size': h.format_byte_size_binary(node.size),
468 'size': h.format_byte_size_binary(node.size),
473 'modified_at': h.format_date(last_commit_date),
469 'modified_at': h.format_date(last_commit_date),
474 'modified_ts': last_commit_date.isoformat(),
470 'modified_ts': last_commit_date.isoformat(),
475 'revision': last_commit.revision,
471 'revision': last_commit.revision,
476 'short_id': last_commit.short_id,
472 'short_id': last_commit.short_id,
477 'message': h.escape(last_commit.message),
473 'message': h.escape(last_commit.message),
478 'author': h.escape(last_commit.author),
474 'author': h.escape(last_commit.author),
479 'user_profile': h.gravatar_with_user(last_commit.author),
475 'user_profile': h.gravatar_with_user(last_commit.author),
480 })
476 })
481
477
482 return data
478 return data
483
479
def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
              extended_info=False, content=False):
    """
    Recursively walk ``root_path`` of the repository at ``commit_id`` and
    collect every file and directory found below it.

    :param repo_name: name of repository
    :param commit_id: commit id for which to list nodes
    :param root_path: root path to list
    :param flat: when True return plain path strings; when False return a
        dict description per node
    :param extended_info: add md5/size/mimetype/... details to each dict
    :param content: add the (non-binary) file content to each dict
    :returns: tuple of (directories, files)
    """
    dir_nodes = []
    file_nodes = []
    try:
        db_repo = self._get_repo(repo_name)
        commit = db_repo.scm_instance().get_commit(commit_id=commit_id)
        root_path = root_path.lstrip('/')
        for __, dirs, files in commit.walk(root_path):
            for file_node in files:
                raw_content = None
                entry = file_node.unicode_path
                if not flat:
                    entry = {
                        "name": file_node.unicode_path,
                        "type": "file",
                    }
                    if extended_info:
                        raw_content = safe_str(file_node.content)
                        entry.update({
                            "md5": md5(raw_content),
                            "binary": file_node.is_binary,
                            "size": file_node.size,
                            "extension": file_node.extension,
                            "mimetype": file_node.mimetype,
                            "lines": file_node.lines()[0],
                        })
                    if content:
                        full_content = None
                        if not file_node.is_binary:
                            # re-use content already loaded for
                            # extended_info, or read it from the node now
                            full_content = raw_content or safe_str(
                                file_node.content)
                        entry.update({"content": full_content})
                file_nodes.append(entry)

            for dir_node in dirs:
                entry = dir_node.unicode_path
                if not flat:
                    entry = {
                        "name": dir_node.unicode_path,
                        "type": "dir",
                    }
                    if extended_info:
                        # directories carry no file metadata
                        entry.update({
                            "md5": None,
                            "binary": None,
                            "size": None,
                            "extension": None,
                        })
                    if content:
                        entry.update({"content": None})
                dir_nodes.append(entry)
    except RepositoryError:
        log.debug("Exception in get_nodes", exc_info=True)
        raise

    return dir_nodes, file_nodes
def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                 author=None, trigger_push_hook=True):
    """
    Commits given multiple nodes into repo

    :param user: RhodeCode User object or user_id, the committer
    :param repo: RhodeCode Repository object
    :param message: commit message
    :param nodes: mapping {filename:{'content':content},...}
    :param parent_commit: parent commit, can be empty then it's
        the initial commit
    :param author: author of commit, can be different than committer
        (only for git)
    :param trigger_push_hook: trigger push hooks
    :returns: new committed commit
    """
    user = self._get_user(user)
    scm_instance = repo.scm_instance(cache=False)

    processed_nodes = []
    for node_path in nodes:
        node_path = self._sanitize_path(node_path)
        raw_content = nodes[node_path]['content']
        node_path = safe_str(node_path)
        # normalize content to a plain byte string; anything else would
        # deny the commit with an explicit error
        if isinstance(raw_content, (basestring,)):
            raw_content = safe_str(raw_content)
        elif isinstance(raw_content, (file, cStringIO.OutputType,)):
            raw_content = raw_content.read()
        else:
            raise Exception('Content is of unrecognized type %s' % (
                type(raw_content)
            ))
        processed_nodes.append((node_path, raw_content))

    message = safe_unicode(message)
    commiter = user.full_contact
    author = safe_unicode(author) if author else commiter

    imc = scm_instance.in_memory_commit

    if not parent_commit:
        parent_commit = EmptyCommit(alias=scm_instance.alias)

    # an EmptyCommit parent means we commit into an empty repository
    if isinstance(parent_commit, EmptyCommit):
        parents = None
    else:
        parents = [parent_commit]

    for node_path, node_content in processed_nodes:
        imc.add(FileNode(node_path, content=node_content))

    # TODO: handle pre push scenario
    tip = imc.commit(message=message,
                     author=author,
                     parents=parents,
                     branch=parent_commit.branch)

    self.mark_for_invalidation(repo.repo_name)
    if trigger_push_hook:
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local',
            repo_name=repo.repo_name, repo_alias=scm_instance.alias,
            commit_ids=[tip.raw_id])
    return tip
def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                 author=None, trigger_push_hook=True):
    """
    Commits updates (add/del/mod, including renames) of multiple nodes
    into repo.

    :param user: RhodeCode User object or user_id, the committer
    :param repo: RhodeCode Repository object
    :param message: commit message
    :param nodes: mapping {old_filename: {'filename': new_name,
        'content': content, 'op': 'add'|'del'|'mod'}, ...}
    :param parent_commit: parent commit, can be empty then it's
        the initial commit
    :param author: author of commit, can be different than committer
        (only for git)
    :param trigger_push_hook: trigger push hooks
    :returns: new committed commit (added for consistency with
        create_nodes/delete_nodes, which both return the tip)
    """
    user = self._get_user(user)
    scm_instance = repo.scm_instance(cache=False)

    message = safe_unicode(message)
    commiter = user.full_contact
    author = safe_unicode(author) if author else commiter

    imc = scm_instance.in_memory_commit

    if not parent_commit:
        parent_commit = EmptyCommit(alias=scm_instance.alias)

    if isinstance(parent_commit, EmptyCommit):
        # EmptyCommit means we're editing an empty repository
        parents = None
    else:
        parents = [parent_commit]

    # stage requested operation for each node
    for _filename, data in nodes.items():
        # new filename can be a rename of the old one; sanitize both to
        # guard against relative-path tricks like ../../ etc.
        filename = self._sanitize_path(data['filename'])
        old_filename = self._sanitize_path(_filename)
        content = data['content']

        filenode = FileNode(old_filename, content=content)
        op = data['op']
        if op == 'add':
            imc.add(filenode)
        elif op == 'del':
            imc.remove(filenode)
        elif op == 'mod':
            if filename != old_filename:
                # TODO: handle renames more efficient, needs vcs lib
                # changes
                imc.remove(filenode)
                imc.add(FileNode(filename, content=content))
            else:
                imc.change(filenode)

    try:
        # TODO: handle pre push scenario
        # commit changes
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)
    except NodeNotChangedError:
        raise
    except Exception as e:
        log.exception("Unexpected exception during call to imc.commit")
        raise IMCCommitError(str(e))
    finally:
        # always clear caches, if commit fails we want fresh object also
        self.mark_for_invalidation(repo.repo_name)

    if trigger_push_hook:
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local',
            repo_name=repo.repo_name, repo_alias=scm_instance.alias,
            commit_ids=[tip.raw_id])

    # previously this method returned None; returning the new tip makes
    # it consistent with create_nodes/delete_nodes (backward-compatible)
    return tip
def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                 author=None, trigger_push_hook=True):
    """
    Deletes given multiple nodes into `repo`

    :param user: RhodeCode User object or user_id, the committer
    :param repo: RhodeCode Repository object
    :param message: commit message
    :param nodes: mapping {filename:{'content':content},...}
    :param parent_commit: parent commit, can be empty then it's the
        initial commit
    :param author: author of commit, can be different than committer
        (only for git)
    :param trigger_push_hook: trigger push hooks
    :returns: new commit after deletion
    """
    user = self._get_user(user)
    scm_instance = repo.scm_instance(cache=False)

    processed_nodes = []
    for node_path in nodes:
        node_path = self._sanitize_path(node_path)
        # content can be empty, but for compatibility the dict keeps the
        # same structure as in create_nodes
        node_content = nodes[node_path].get('content')
        processed_nodes.append((node_path, node_content))

    message = safe_unicode(message)
    commiter = user.full_contact
    author = safe_unicode(author) if author else commiter

    imc = scm_instance.in_memory_commit

    if not parent_commit:
        parent_commit = EmptyCommit(alias=scm_instance.alias)

    # an EmptyCommit parent means we're operating on an empty repository
    if isinstance(parent_commit, EmptyCommit):
        parents = None
    else:
        parents = [parent_commit]

    # stage removal of every requested node
    for node_path, node_content in processed_nodes:
        imc.remove(FileNode(node_path, content=node_content))

    # TODO: handle pre push scenario
    tip = imc.commit(message=message,
                     author=author,
                     parents=parents,
                     branch=parent_commit.branch)

    self.mark_for_invalidation(repo.repo_name)
    if trigger_push_hook:
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local',
            repo_name=repo.repo_name, repo_alias=scm_instance.alias,
            commit_ids=[tip.raw_id])
    return tip
def strip(self, repo, commit_id, branch):
    """
    Strip ``commit_id`` (and its descendants) from ``repo`` on the given
    branch, then invalidate the repository cache.

    Hooks are disabled for the operation by clearing the 'hooks' config
    section on a non-cached scm instance.
    """
    vcs_repo = repo.scm_instance(cache=False)
    vcs_repo.config.clear_section('hooks')
    vcs_repo.strip(commit_id, branch)
    self.mark_for_invalidation(repo.repo_name)
def get_unread_journal(self):
    """Return the total count of ``UserLog`` rows in the database."""
    journal_query = self.sa.query(UserLog)
    return journal_query.count()
def get_repo_landing_revs(self, repo=None):
    """
    Generates select option with tags branches and bookmarks (for hg only)
    grouped by type

    :param repo: repository name or object; when it cannot be resolved
        only the 'rev:tip' entry is returned
    :returns: tuple of (choices, grouped option list)
    """
    hist_l = [['rev:tip', _('latest tip')]]
    choices = ['rev:tip']
    repo = self._get_repo(repo)
    if not repo:
        return choices, hist_l

    repo = repo.scm_instance()

    def _append_group(names, prefix, label):
        # build one (options, label) group and record its choice keys
        group = (
            [(u'%s:%s' % (prefix, safe_unicode(name)), safe_unicode(name))
             for name in names],
            label)
        hist_l.append(group)
        choices.extend([option[0] for option in group[0]])

    _append_group(repo.branches, u'branch', _("Branches"))

    # bookmarks exist only for mercurial repositories
    if repo.alias == 'hg':
        _append_group(repo.bookmarks, u'book', _("Bookmarks"))

    _append_group(repo.tags, u'tag', _("Tags"))

    return choices, hist_l
def install_git_hook(self, repo, force_create=False):
    """
    Creates a rhodecode hook inside a git repository

    :param repo: Instance of VCS repo
    :param force_create: Create even if same name hook exists
    """
    hooks_dir = os.path.join(repo.path, 'hooks')
    if not repo.bare:
        hooks_dir = os.path.join(repo.path, '.git', 'hooks')
    if not os.path.isdir(hooks_dir):
        os.makedirs(hooks_dir, mode=0o777)

    # hook templates ship inside the rhodecode package
    tmpl_post = pkg_resources.resource_string(
        'rhodecode', '/'.join(
            ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
    tmpl_pre = pkg_resources.resource_string(
        'rhodecode', '/'.join(
            ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))

    for h_type, template in (('pre', tmpl_pre), ('post', tmpl_post)):
        hook_file = os.path.join(hooks_dir, '%s-receive' % h_type)
        log.debug('Installing git hook in repo %s', repo)
        is_rhodecode_hook = _check_rhodecode_hook(hook_file)

        # only overwrite hooks we own, unless forced
        if not (is_rhodecode_hook or force_create):
            log.debug('skipping writing hook file')
            continue

        log.debug('writing %s hook file !', h_type)
        try:
            with open(hook_file, 'wb') as f:
                template = template.replace('_TMPL_', rhodecode.__version__)
                template = template.replace('_ENV_', sys.executable)
                f.write(template)
            os.chmod(hook_file, 0o755)
        except IOError:
            log.exception('error writing hook file %s', hook_file)
def install_svn_hooks(self, repo, force_create=False):
    """
    Creates rhodecode hooks inside a svn repository

    :param repo: Instance of VCS repo
    :param force_create: Create even if same name hook exists
    """
    hooks_path = os.path.join(repo.path, 'hooks')
    if not os.path.isdir(hooks_path):
        os.makedirs(hooks_path)

    def _load_template(tmpl_name):
        # hook templates ship inside the rhodecode package
        return pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', tmpl_name)))

    templates = {
        'post-commit': _load_template('svn_post_commit_hook.py.tmpl'),
        'pre-commit': _load_template('svn_pre_commit_hook.py.tmpl'),
    }
    for filename in templates:
        hook_file = os.path.join(hooks_path, filename)
        is_rhodecode_hook = _check_rhodecode_hook(hook_file)

        # only overwrite hooks we own, unless forced
        if not (is_rhodecode_hook or force_create):
            log.debug('skipping writing hook file')
            continue

        log.debug('writing %s hook file !', filename)
        template = templates[filename]
        try:
            with open(hook_file, 'wb') as f:
                template = template.replace(
                    '_TMPL_', rhodecode.__version__)
                template = template.replace('_ENV_', sys.executable)
                f.write(template)
            os.chmod(hook_file, 0o755)
        except IOError:
            log.exception('error writing hook file %s', filename)
def install_hooks(self, repo, repo_type):
    """
    Install the appropriate rhodecode hooks for ``repo``.

    Dispatches to the git or svn installer based on ``repo_type``;
    any other type is silently ignored (matching the original
    if/elif behavior).
    """
    installers = {
        'git': self.install_git_hook,
        'svn': self.install_svn_hooks,
    }
    installer = installers.get(repo_type)
    if installer is not None:
        installer(repo)
887 def get_server_info(self, environ=None):
883 def get_server_info(self, environ=None):
888 import platform
884 import platform
889 import rhodecode
885 import rhodecode
890 import pkg_resources
886 import pkg_resources
891 from rhodecode.model.meta import Base as sql_base, Session
887 from rhodecode.model.meta import Base as sql_base, Session
892 from sqlalchemy.engine import url
888 from sqlalchemy.engine import url
893 from rhodecode.lib.base import get_server_ip_addr, get_server_port
889 from rhodecode.lib.base import get_server_ip_addr, get_server_port
894 from rhodecode.lib.vcs.backends.git import discover_git_version
890 from rhodecode.lib.vcs.backends.git import discover_git_version
895 from rhodecode.model.gist import GIST_STORE_LOC
891 from rhodecode.model.gist import GIST_STORE_LOC
896
892
897 try:
893 try:
898 # cygwin cannot have yet psutil support.
894 # cygwin cannot have yet psutil support.
899 import psutil
895 import psutil
900 except ImportError:
896 except ImportError:
901 psutil = None
897 psutil = None
902
898
903 environ = environ or {}
899 environ = environ or {}
904 _NA = 'NOT AVAILABLE'
900 _NA = 'NOT AVAILABLE'
905 _memory = _NA
901 _memory = _NA
906 _uptime = _NA
902 _uptime = _NA
907 _boot_time = _NA
903 _boot_time = _NA
908 _cpu = _NA
904 _cpu = _NA
909 _disk = dict(percent=0, used=0, total=0, error='')
905 _disk = dict(percent=0, used=0, total=0, error='')
910 _load = {'1_min': _NA, '5_min': _NA, '15_min': _NA}
906 _load = {'1_min': _NA, '5_min': _NA, '15_min': _NA}
911
907
912 model = VcsSettingsModel()
908 model = VcsSettingsModel()
913 storage_path = model.get_repos_location()
909 storage_path = model.get_repos_location()
914 gist_storage_path = os.path.join(storage_path, GIST_STORE_LOC)
910 gist_storage_path = os.path.join(storage_path, GIST_STORE_LOC)
915 archive_storage_path = rhodecode.CONFIG.get('archive_cache_dir', '')
911 archive_storage_path = rhodecode.CONFIG.get('archive_cache_dir', '')
916 search_index_storage_path = rhodecode.CONFIG.get('search.location', '')
912 search_index_storage_path = rhodecode.CONFIG.get('search.location', '')
917
913
918 if psutil:
914 if psutil:
919 # disk storage
915 # disk storage
920 try:
916 try:
921 _disk = dict(psutil.disk_usage(storage_path)._asdict())
917 _disk = dict(psutil.disk_usage(storage_path)._asdict())
922 except Exception as e:
918 except Exception as e:
923 log.exception('Failed to fetch disk info')
919 log.exception('Failed to fetch disk info')
924 _disk = {'percent': 0, 'used': 0, 'total': 0, 'error': str(e)}
920 _disk = {'percent': 0, 'used': 0, 'total': 0, 'error': str(e)}
925
921
926 # memory
922 # memory
927 _memory = dict(psutil.virtual_memory()._asdict())
923 _memory = dict(psutil.virtual_memory()._asdict())
928 _memory['percent2'] = psutil._common.usage_percent(
924 _memory['percent2'] = psutil._common.usage_percent(
929 (_memory['total'] - _memory['free']),
925 (_memory['total'] - _memory['free']),
930 _memory['total'], 1)
926 _memory['total'], 1)
931
927
932 # load averages
928 # load averages
933 if hasattr(psutil.os, 'getloadavg'):
929 if hasattr(psutil.os, 'getloadavg'):
934 _load = dict(zip(
930 _load = dict(zip(
935 ['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
931 ['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
936 _uptime = time.time() - psutil.boot_time()
932 _uptime = time.time() - psutil.boot_time()
937 _boot_time = psutil.boot_time()
933 _boot_time = psutil.boot_time()
938 _cpu = psutil.cpu_percent(0.5)
934 _cpu = psutil.cpu_percent(0.5)
939
935
940 mods = dict([(p.project_name, p.version)
936 mods = dict([(p.project_name, p.version)
941 for p in pkg_resources.working_set])
937 for p in pkg_resources.working_set])
942
938
943 def get_storage_size(storage_path):
939 def get_storage_size(storage_path):
944 sizes = []
940 sizes = []
945 for file_ in os.listdir(storage_path):
941 for file_ in os.listdir(storage_path):
946 storage_file = os.path.join(storage_path, file_)
942 storage_file = os.path.join(storage_path, file_)
947 if os.path.isfile(storage_file):
943 if os.path.isfile(storage_file):
948 try:
944 try:
949 sizes.append(os.path.getsize(storage_file))
945 sizes.append(os.path.getsize(storage_file))
950 except OSError:
946 except OSError:
951 log.exception('Failed to get size of storage file %s',
947 log.exception('Failed to get size of storage file %s',
952 storage_file)
948 storage_file)
953 pass
949 pass
954
950
955 return sum(sizes)
951 return sum(sizes)
956
952
957 # archive cache storage
953 # archive cache storage
958 _disk_archive = {'percent': 0, 'used': 0, 'total': 0}
954 _disk_archive = {'percent': 0, 'used': 0, 'total': 0}
959 try:
955 try:
960 archive_storage_path_exists = os.path.isdir(
956 archive_storage_path_exists = os.path.isdir(
961 archive_storage_path)
957 archive_storage_path)
962 if archive_storage_path and archive_storage_path_exists:
958 if archive_storage_path and archive_storage_path_exists:
963 used = get_storage_size(archive_storage_path)
959 used = get_storage_size(archive_storage_path)
964 _disk_archive.update({
960 _disk_archive.update({
965 'used': used,
961 'used': used,
966 'total': used,
962 'total': used,
967 })
963 })
968 except Exception as e:
964 except Exception as e:
969 log.exception('failed to fetch archive cache storage')
965 log.exception('failed to fetch archive cache storage')
970 _disk_archive['error'] = str(e)
966 _disk_archive['error'] = str(e)
971
967
972 # search index storage
968 # search index storage
973 _disk_index = {'percent': 0, 'used': 0, 'total': 0}
969 _disk_index = {'percent': 0, 'used': 0, 'total': 0}
974 try:
970 try:
975 search_index_storage_path_exists = os.path.isdir(
971 search_index_storage_path_exists = os.path.isdir(
976 search_index_storage_path)
972 search_index_storage_path)
977 if search_index_storage_path_exists:
973 if search_index_storage_path_exists:
978 used = get_storage_size(search_index_storage_path)
974 used = get_storage_size(search_index_storage_path)
979 _disk_index.update({
975 _disk_index.update({
980 'percent': 100,
976 'percent': 100,
981 'used': used,
977 'used': used,
982 'total': used,
978 'total': used,
983 })
979 })
984 except Exception as e:
980 except Exception as e:
985 log.exception('failed to fetch search index storage')
981 log.exception('failed to fetch search index storage')
986 _disk_index['error'] = str(e)
982 _disk_index['error'] = str(e)
987
983
988 # gist storage
984 # gist storage
989 _disk_gist = {'percent': 0, 'used': 0, 'total': 0, 'items': 0}
985 _disk_gist = {'percent': 0, 'used': 0, 'total': 0, 'items': 0}
990 try:
986 try:
991 items_count = 0
987 items_count = 0
992 used = 0
988 used = 0
993 for root, dirs, files in os.walk(safe_str(gist_storage_path)):
989 for root, dirs, files in os.walk(safe_str(gist_storage_path)):
994 if root == gist_storage_path:
990 if root == gist_storage_path:
995 items_count = len(dirs)
991 items_count = len(dirs)
996
992
997 for f in files:
993 for f in files:
998 try:
994 try:
999 used += os.path.getsize(os.path.join(root, f))
995 used += os.path.getsize(os.path.join(root, f))
1000 except OSError:
996 except OSError:
1001 pass
997 pass
1002 _disk_gist.update({
998 _disk_gist.update({
1003 'percent': 100,
999 'percent': 100,
1004 'used': used,
1000 'used': used,
1005 'total': used,
1001 'total': used,
1006 'items': items_count
1002 'items': items_count
1007 })
1003 })
1008 except Exception as e:
1004 except Exception as e:
1009 log.exception('failed to fetch gist storage items')
1005 log.exception('failed to fetch gist storage items')
1010 _disk_gist['error'] = str(e)
1006 _disk_gist['error'] = str(e)
1011
1007
1012 # GIT info
1008 # GIT info
1013 git_ver = discover_git_version()
1009 git_ver = discover_git_version()
1014
1010
1015 # SVN info
1011 # SVN info
1016 # TODO: johbo: Add discover_svn_version to replace this code.
1012 # TODO: johbo: Add discover_svn_version to replace this code.
1017 try:
1013 try:
1018 import svn.core
1014 import svn.core
1019 svn_ver = svn.core.SVN_VERSION
1015 svn_ver = svn.core.SVN_VERSION
1020 except ImportError:
1016 except ImportError:
1021 svn_ver = None
1017 svn_ver = None
1022
1018
1023 # DB stuff
1019 # DB stuff
1024 db_info = url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
1020 db_info = url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
1025 db_type = db_info.__to_string__()
1021 db_type = db_info.__to_string__()
1026 try:
1022 try:
1027 engine = sql_base.metadata.bind
1023 engine = sql_base.metadata.bind
1028 db_server_info = engine.dialect._get_server_version_info(
1024 db_server_info = engine.dialect._get_server_version_info(
1029 Session.connection(bind=engine))
1025 Session.connection(bind=engine))
1030 db_version = '%s %s' % (db_info.drivername,
1026 db_version = '%s %s' % (db_info.drivername,
1031 '.'.join(map(str, db_server_info)))
1027 '.'.join(map(str, db_server_info)))
1032 except Exception:
1028 except Exception:
1033 log.exception('failed to fetch db version')
1029 log.exception('failed to fetch db version')
1034 db_version = '%s %s' % (db_info.drivername, '?')
1030 db_version = '%s %s' % (db_info.drivername, '?')
1035
1031
1036 db_migrate = DbMigrateVersion.query().filter(
1032 db_migrate = DbMigrateVersion.query().filter(
1037 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
1033 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
1038 db_migrate_version = db_migrate.version
1034 db_migrate_version = db_migrate.version
1039
1035
1040 info = {
1036 info = {
1041 'py_version': ' '.join(platform._sys_version()),
1037 'py_version': ' '.join(platform._sys_version()),
1042 'py_path': sys.executable,
1038 'py_path': sys.executable,
1043 'py_modules': sorted(mods.items(), key=lambda k: k[0].lower()),
1039 'py_modules': sorted(mods.items(), key=lambda k: k[0].lower()),
1044
1040
1045 'platform': safe_unicode(platform.platform()),
1041 'platform': safe_unicode(platform.platform()),
1046 'storage': storage_path,
1042 'storage': storage_path,
1047 'archive_storage': archive_storage_path,
1043 'archive_storage': archive_storage_path,
1048 'index_storage': search_index_storage_path,
1044 'index_storage': search_index_storage_path,
1049 'gist_storage': gist_storage_path,
1045 'gist_storage': gist_storage_path,
1050
1046
1051
1047
1052 'db_type': db_type,
1048 'db_type': db_type,
1053 'db_version': db_version,
1049 'db_version': db_version,
1054 'db_migrate_version': db_migrate_version,
1050 'db_migrate_version': db_migrate_version,
1055
1051
1056 'rhodecode_version': rhodecode.__version__,
1052 'rhodecode_version': rhodecode.__version__,
1057 'rhodecode_config_ini': rhodecode.CONFIG.get('__file__'),
1053 'rhodecode_config_ini': rhodecode.CONFIG.get('__file__'),
1058 'server_ip': '%s:%s' % (
1054 'server_ip': '%s:%s' % (
1059 get_server_ip_addr(environ, log_errors=False),
1055 get_server_ip_addr(environ, log_errors=False),
1060 get_server_port(environ)
1056 get_server_port(environ)
1061 ),
1057 ),
1062 'server_id': rhodecode.CONFIG.get('instance_id'),
1058 'server_id': rhodecode.CONFIG.get('instance_id'),
1063
1059
1064 'git_version': safe_unicode(git_ver),
1060 'git_version': safe_unicode(git_ver),
1065 'hg_version': mods.get('mercurial'),
1061 'hg_version': mods.get('mercurial'),
1066 'svn_version': svn_ver,
1062 'svn_version': svn_ver,
1067
1063
1068 'uptime': _uptime,
1064 'uptime': _uptime,
1069 'boot_time': _boot_time,
1065 'boot_time': _boot_time,
1070 'load': _load,
1066 'load': _load,
1071 'cpu': _cpu,
1067 'cpu': _cpu,
1072 'memory': _memory,
1068 'memory': _memory,
1073 'disk': _disk,
1069 'disk': _disk,
1074 'disk_archive': _disk_archive,
1070 'disk_archive': _disk_archive,
1075 'disk_gist': _disk_gist,
1071 'disk_gist': _disk_gist,
1076 'disk_index': _disk_index,
1072 'disk_index': _disk_index,
1077 }
1073 }
1078 return info
1074 return info
1079
1075
1080
1076
def _check_rhodecode_hook(hook_path):
    """
    Check if the hook was created by RhodeCode.

    A missing hook file counts as "ours" (returns ``True``), since RhodeCode
    is then free to install its own. Otherwise the file content is scanned
    for the ``RC_HOOK_VER`` marker that RhodeCode writes into generated
    hooks; only hooks carrying that marker are considered RhodeCode's.
    """
    if not os.path.exists(hook_path):
        # nothing on disk to inspect, safe for RhodeCode to (re)create it
        return True

    log.debug('hook exists, checking if it is from rhodecode')
    content = _read_hook(hook_path)
    found = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', content)
    if not found:
        return False

    try:
        hook_version = found.groups()[0]
        log.debug('got %s, it is rhodecode', hook_version)
        return True
    except Exception:
        log.exception("Exception while reading the hook version.")
        return False
1100
1096
1101
1097
1102 def _read_hook(hook_path):
1098 def _read_hook(hook_path):
1103 with open(hook_path, 'rb') as f:
1099 with open(hook_path, 'rb') as f:
1104 content = f.read()
1100 content = f.read()
1105 return content
1101 return content
General Comments 0
You need to be logged in to leave comments. Login now