system-info: fix possible float division by zero in calculating % value
marcink - r1088:7d451b5a default
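The commit guards the `percentage` helper used by `ScmModel.get_server_info` for disk and inode usage so it no longer divides by a zero denominator. A minimal sketch of the behaviour before and after the guard; the sample values are illustrative, not taken from the commit:

def percentage(part, whole):
    whole = float(whole)
    if whole > 0:
        return 100 * float(part) / whole
    return 0

# Before the fix, percentage(5, 0) raised ZeroDivisionError; now it returns 0.
assert percentage(5, 0) == 0
assert percentage(5, 10) == 50.0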
@@ -1,1120 +1,1123 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Scm model for RhodeCode
"""

import os.path
import re
import sys
import time
import traceback
import logging
import cStringIO
import pkg_resources

import pylons
from pylons.i18n.translation import _
from sqlalchemy import func
from zope.cachedescriptors.property import Lazy as LazyProperty

import rhodecode
from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib import helpers as h

from rhodecode.lib.auth import (
    HasRepoPermissionAny, HasRepoGroupPermissionAny,
    HasUserGroupPermissionAny)
from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
from rhodecode.lib import hooks_utils, caches
from rhodecode.lib.utils import (
    get_filesystem_repos, action_logger, make_db_config)
from rhodecode.lib.utils2 import (
    safe_str, safe_unicode, get_server_url, md5)
from rhodecode.model import BaseModel
from rhodecode.model.db import (
    Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
    PullRequest, DbMigrateVersion)
from rhodecode.model.settings import VcsSettingsModel

log = logging.getLogger(__name__)


class UserTemp(object):
    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)


class RepoTemp(object):
    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)


class SimpleCachedRepoList(object):
    """
    Lighter version of iteration over repos, without the scm initialisation
    and with cache usage
    """
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')
        if not perm_set:
            perm_set = ['repository.read', 'repository.write',
                        'repository.admin']
        self.perm_set = perm_set

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbr in self.db_repo_list:
            # check permission at this level
            has_perm = HasRepoPermissionAny(*self.perm_set)(
                dbr.repo_name, 'SimpleCachedRepoList check')
            if not has_perm:
                continue

            tmp_d = {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
            }
            yield tmp_d


class _PermCheckIterator(object):

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Creates an iterator from the given list of objects, additionally
        checking permissions for them against the perm_set var

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        checker = self.perm_checker(*self.perm_set)
        for db_obj in self.obj_list:
            # check permission at this level
            name = getattr(db_obj, self.obj_attr, None)
            if not checker(name, self.__class__.__name__, **self.extra_kwargs):
                continue

            yield db_obj


class RepoList(_PermCheckIterator):

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = [
                'repository.read', 'repository.write', 'repository.admin']

        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)


class RepoGroupList(_PermCheckIterator):

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['group.read', 'group.write', 'group.admin']

        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)


class UserGroupList(_PermCheckIterator):

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']

        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)


class ScmModel(BaseModel):
    """
    Generic Scm Model
    """

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()

    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects

        :param repos_path: path to directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name needs to be decomposed and put back together using the /
            # since this is the internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    klass = get_backend(path[0])
                    repos[name] = klass(path[1], config=config)
            except OSError:
                continue
        log.debug('found %s paths with repositories', len(repos))
        return repos

    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db, and for each repo create its
        backend instance and fill that backend with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repositories
        """
        if all_repos is None:
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter

    def get_repo_groups(self, all_groups=None):
        if all_groups is None:
            all_groups = RepoGroup.query()\
                .filter(RepoGroup.group_parent_id == None).all()
        return [x for x in RepoGroupList(all_groups)]

    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them
        """
        CacheKey.set_invalidate(repo_name, delete=delete)
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            config = repo._config
            config.set('extensions', 'largefiles', '')
            repo.update_commit_cache(config=config, cs_cache=None)
            caches.clear_repo_caches(repo_name)

    def toggle_following_repo(self, follow_repo_id, user_id):

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                action_logger(UserTemp(user_id),
                              'stopped_following_repo',
                              RepoTemp(follow_repo_id))
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)

            action_logger(UserTemp(user_id),
                          'started_following_repo',
                          RepoTemp(follow_repo_id))
        except Exception:
            log.error(traceback.format_exc())
            raise

    def toggle_following_user(self, follow_user_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def get_followers(self, repo):
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()

    def get_forks(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()

    def get_pull_requests(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.target_repo == repo)\
            .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()

    def mark_as_fork(self, repo, fork, user):
        repo = self._get_repo(repo)
        fork = self._get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")

        if fork and repo.repo_type != fork.repo_type:
            raise RepositoryError(
                "Cannot set repository as fork of repository with other type")

        repo.fork = fork
        self.sa.add(repo)
        return repo

    def pull_changes(self, repo, username):
        dbrepo = self._get_repo(repo)
        clone_uri = dbrepo.clone_uri
        if not clone_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # TODO: marcink fix this and re-enable, since we need common logic
        # for hg/git to remove hooks so we don't trigger them on fetching
        # commits from remote
        repo.config.clear_section('hooks')

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.pull(clone_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits changes

        :param repo: SCM instance

        """
        user = self._get_user(user)

        # decoding here will force that we have properly encoded values;
        # in any other case this will throw exceptions and deny the commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author need to be unicode;
        # the proper backend should then translate that into the required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches; if the commit fails we want a fresh object too
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', repo_name=repo_name,
            repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip

    def _sanitize_path(self, f_path):
        if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
            raise NonRelativePathError('%s is not a relative path' % f_path)
        if f_path:
            f_path = os.path.normpath(f_path)
        return f_path

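    # Collects display metadata (size, last commit date, author, gravatar
    # markup) for each file node directly under the given directory node.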
    def get_dirnode_metadata(self, commit, dir_node):
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # we skip non-file nodes
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(last_commit.author),
            })

        return data

    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        Recursive walk in root dir and return a set of all paths in that dir
        based on repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with description
        :param max_file_bytes: will not return file contents over this limit

        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):
                for f in files:
                    _content = None
                    _data = f.unicode_path
                    over_size_limit = (max_file_bytes is not None
                                       and f.size > max_file_bytes)

                    if not flat:
                        _data = {
                            "name": f.unicode_path,
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)
                for d in dirs:
                    _data = d.unicode_path
                    if not flat:
                        _data = {
                            "name": d.unicode_path,
                            "type": "dir",
                        }
                        if extended_info:
                            _data.update({
                                "md5": None,
                                "binary": None,
                                "size": None,
                                "extension": None,
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.debug("Exception in get_nodes", exc_info=True)
            raise

        return _dirs, _files

    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty, then it's the
            initial commit
        :param author: author of commit, can be different than the committer,
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have properly encoded values;
            # in any other case this will throw exceptions and deny the commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip

    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one; also sanitize
            # the path against any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']

            filenode = FileNode(old_filename, content=content)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficiently, needs vcs lib
                    # changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario
            # commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches; if the commit fails we want a fresh object too
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])

    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes from `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty, then it's the
            initial commit
        :param author: author of commit, can be different than the committer,
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty, but for compatibility it allows the same
            # dict structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip

    def strip(self, repo, commit_id, branch):
        scm_instance = repo.scm_instance(cache=False)
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    def get_repo_landing_revs(self, repo=None):
        """
        Generates select options with tags, branches and bookmarks (the
        latter for hg only), grouped by type

        :param repo:
        """

        hist_l = []
        choices = []
        repo = self._get_repo(repo)
        hist_l.append(['rev:tip', _('latest tip')])
        choices.append('rev:tip')
        if not repo:
            return choices, hist_l

        repo = repo.scm_instance()

        branches_group = (
            [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
             for b in repo.branches],
            _("Branches"))
        hist_l.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        if repo.alias == 'hg':
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            hist_l.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        hist_l.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, hist_l

    def install_git_hook(self, repo, force_create=False):
        """
        Creates a rhodecode hook inside a git repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """

        loc = os.path.join(repo.path, 'hooks')
        if not repo.bare:
            loc = os.path.join(repo.path, '.git', 'hooks')
        if not os.path.isdir(loc):
            os.makedirs(loc, mode=0777)

        tmpl_post = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
        tmpl_pre = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))

        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
            _hook_file = os.path.join(loc, '%s-receive' % h_type)
            log.debug('Installing git hook in repo %s', repo)
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)

            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', h_type)
                try:
                    with open(_hook_file, 'wb') as f:
                        tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
                        tmpl = tmpl.replace('_ENV_', sys.executable)
                        f.write(tmpl)
                    os.chmod(_hook_file, 0755)
                except IOError:
                    log.exception('error writing hook file %s', _hook_file)
            else:
                log.debug('skipping writing hook file')

    def install_svn_hooks(self, repo, force_create=False):
        """
        Creates rhodecode hooks inside a svn repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """
        hooks_path = os.path.join(repo.path, 'hooks')
        if not os.path.isdir(hooks_path):
            os.makedirs(hooks_path)
        post_commit_tmpl = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
        pre_commit_template = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
        templates = {
            'post-commit': post_commit_tmpl,
            'pre-commit': pre_commit_template
        }
        for filename in templates:
            _hook_file = os.path.join(hooks_path, filename)
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)
            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', filename)
                template = templates[filename]
                try:
                    with open(_hook_file, 'wb') as f:
                        template = template.replace(
                            '_TMPL_', rhodecode.__version__)
                        template = template.replace('_ENV_', sys.executable)
                        f.write(template)
                    os.chmod(_hook_file, 0755)
                except IOError:
                    log.exception('error writing hook file %s', filename)
            else:
                log.debug('skipping writing hook file')

    def install_hooks(self, repo, repo_type):
        if repo_type == 'git':
            self.install_git_hook(repo)
        elif repo_type == 'svn':
            self.install_svn_hooks(repo)

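    # Gathers platform, storage, memory, CPU and load information for the
    # system-info page; degrades gracefully when psutil is unavailable.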
    def get_server_info(self, environ=None):
        import platform
        import rhodecode
        import pkg_resources
        from rhodecode.model.meta import Base as sql_base, Session
        from sqlalchemy.engine import url
        from rhodecode.lib.base import get_server_ip_addr, get_server_port
        from rhodecode.lib.vcs.backends.git import discover_git_version
        from rhodecode.model.gist import GIST_STORE_LOC

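        # 'whole' can legitimately be 0 here; os.statvfs(), for example, may
        # report a total inode count (f_files) of 0 on some filesystems.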
        def percentage(part, whole):
-           return 100 * float(part) / float(whole)
+           whole = float(whole)
+           if whole > 0:
+               return 100 * float(part) / whole
+           return 0

897 try:
900 try:
898 # cygwin cannot have yet psutil support.
901 # cygwin cannot have yet psutil support.
899 import psutil
902 import psutil
900 except ImportError:
903 except ImportError:
901 psutil = None
904 psutil = None
902
905
        environ = environ or {}
        _NA = 'NOT AVAILABLE'
        _memory = _NA
        _uptime = _NA
        _boot_time = _NA
        _cpu = _NA
        _disk = dict(percent=0, used=0, total=0, error='')
        _disk_inodes = dict(percent=0, free=0, used=0, total=0, error='')
        _load = {'1_min': _NA, '5_min': _NA, '15_min': _NA}

        model = VcsSettingsModel()
        storage_path = model.get_repos_location()
        gist_storage_path = os.path.join(storage_path, GIST_STORE_LOC)
        archive_storage_path = rhodecode.CONFIG.get('archive_cache_dir', '')
        search_index_storage_path = rhodecode.CONFIG.get('search.location', '')

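        # system level stats are gathered only when psutil is importable;
        # otherwise the NOT AVAILABLE defaults above are reported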
        if psutil:
            # disk storage
            try:
                _disk = dict(psutil.disk_usage(storage_path)._asdict())
            except Exception as e:
                log.exception('Failed to fetch disk info')
                _disk = {'percent': 0, 'used': 0, 'total': 0, 'error': str(e)}

            # disk inodes usage
            try:
                i_stat = os.statvfs(storage_path)

                # f_files is the total inode count and f_ffree the free
                # count, so used inodes are total minus free
                _disk_inodes['used'] = i_stat.f_files - i_stat.f_ffree
                _disk_inodes['free'] = i_stat.f_favail
                _disk_inodes['total'] = i_stat.f_files
                _disk_inodes['percent'] = percentage(
                    _disk_inodes['used'], _disk_inodes['total'])
            except Exception as e:
                log.exception('Failed to fetch disk inodes info')
                _disk_inodes['error'] = str(e)

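            # 'percent2' recomputes usage as (total - free) / total, which
            # counts buffers/cache as used, unlike psutil's builtin 'percent'
            # that is based on available memory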
            # memory
            _memory = dict(psutil.virtual_memory()._asdict())
            _memory['percent2'] = psutil._common.usage_percent(
                (_memory['total'] - _memory['free']),
                _memory['total'], 1)

            # load averages
            if hasattr(psutil.os, 'getloadavg'):
                _load = dict(zip(
                    ['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
            _uptime = time.time() - psutil.boot_time()
            _boot_time = psutil.boot_time()
            _cpu = psutil.cpu_percent(0.5)

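        # installed python packages and their versions; also consulted below
        # for the mercurial version, since hg is used as a python library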
        mods = dict([(p.project_name, p.version)
                     for p in pkg_resources.working_set])

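        # sums only the files directly inside the given directory; nested
        # sub-directories are not traversed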
        def get_storage_size(storage_path):
            sizes = []
            for file_ in os.listdir(storage_path):
                storage_file = os.path.join(storage_path, file_)
                if os.path.isfile(storage_file):
                    try:
                        sizes.append(os.path.getsize(storage_file))
                    except OSError:
                        log.exception('Failed to get size of storage file %s',
                                      storage_file)

            return sum(sizes)

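        # the archive cache is a flat directory of generated archives, so
        # 'total' is simply set to 'used' below; no filesystem capacity or
        # meaningful percentage is implied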
        # archive cache storage
        _disk_archive = {'percent': 0, 'used': 0, 'total': 0}
        try:
            archive_storage_path_exists = os.path.isdir(
                archive_storage_path)
            if archive_storage_path and archive_storage_path_exists:
                used = get_storage_size(archive_storage_path)
                _disk_archive.update({
                    'used': used,
                    'total': used,
                })
        except Exception as e:
            log.exception('failed to fetch archive cache storage')
            _disk_archive['error'] = str(e)

        # search index storage
        _disk_index = {'percent': 0, 'used': 0, 'total': 0}
        try:
            search_index_storage_path_exists = os.path.isdir(
                search_index_storage_path)
            if search_index_storage_path_exists:
                used = get_storage_size(search_index_storage_path)
                _disk_index.update({
                    'percent': 100,
                    'used': used,
                    'total': used,
                })
        except Exception as e:
            log.exception('failed to fetch search index storage')
            _disk_index['error'] = str(e)

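        # each top-level directory under the gist store holds one gist, so
        # the first os.walk() iteration yields the item count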
        # gist storage
        _disk_gist = {'percent': 0, 'used': 0, 'total': 0, 'items': 0}
        try:
            items_count = 0
            used = 0
            for root, dirs, files in os.walk(safe_str(gist_storage_path)):
                if root == gist_storage_path:
                    items_count = len(dirs)

                for f in files:
                    try:
                        used += os.path.getsize(os.path.join(root, f))
                    except OSError:
                        pass
            _disk_gist.update({
                'percent': 100,
                'used': used,
                'total': used,
                'items': items_count
            })
        except Exception as e:
            log.exception('failed to fetch gist storage items')
            _disk_gist['error'] = str(e)

        # GIT info
        git_ver = discover_git_version()

        # SVN info
        # TODO: johbo: Add discover_svn_version to replace this code.
        try:
            import svn.core
            svn_ver = svn.core.SVN_VERSION
        except ImportError:
            svn_ver = None

        # DB stuff
        db_info = url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
        db_type = db_info.__to_string__()
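        # _get_server_version_info is a private SQLAlchemy dialect hook, so
        # failures here are treated as non-fatal and fall back to '?'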
        try:
            engine = sql_base.metadata.bind
            db_server_info = engine.dialect._get_server_version_info(
                Session.connection(bind=engine))
            db_version = '%s %s' % (db_info.drivername,
                                    '.'.join(map(str, db_server_info)))
        except Exception:
            log.exception('failed to fetch db version')
            db_version = '%s %s' % (db_info.drivername, '?')

        db_migrate = DbMigrateVersion.query().filter(
            DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
        db_migrate_version = db_migrate.version

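        # assemble everything into one flat dict, consumed by callers such
        # as the system-info view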
        info = {
            'py_version': ' '.join(platform._sys_version()),
            'py_path': sys.executable,
            'py_modules': sorted(mods.items(), key=lambda k: k[0].lower()),

            'platform': safe_unicode(platform.platform()),
            'storage': storage_path,
            'archive_storage': archive_storage_path,
            'index_storage': search_index_storage_path,
            'gist_storage': gist_storage_path,

            'db_type': db_type,
            'db_version': db_version,
            'db_migrate_version': db_migrate_version,

            'rhodecode_version': rhodecode.__version__,
            'rhodecode_config_ini': rhodecode.CONFIG.get('__file__'),
            'server_ip': '%s:%s' % (
                get_server_ip_addr(environ, log_errors=False),
                get_server_port(environ)
            ),
            'server_id': rhodecode.CONFIG.get('instance_id'),

            'git_version': safe_unicode(git_ver),
            'hg_version': mods.get('mercurial'),
            'svn_version': svn_ver,

            'uptime': _uptime,
            'boot_time': _boot_time,
            'load': _load,
            'cpu': _cpu,
            'memory': _memory,
            'disk': _disk,
            'disk_inodes': _disk_inodes,
            'disk_archive': _disk_archive,
            'disk_gist': _disk_gist,
            'disk_index': _disk_index,
        }
        return info

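# NOTE: returning True means it is safe to (over)write the hook file: either
# no hook exists yet, or the existing one carries the RC_HOOK_VER marker
# that RhodeCode embeds in its own hook templates.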
def _check_rhodecode_hook(hook_path):
    """
    Check if the hook was created by RhodeCode
    """
    if not os.path.exists(hook_path):
        return True

    log.debug('hook exists, checking if it is from rhodecode')
    hook_content = _read_hook(hook_path)
    matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
    if matches:
        try:
            version = matches.groups()[0]
            log.debug('got %s, it is rhodecode', version)
            return True
        except Exception:
            log.exception("Exception while reading the hook version.")

    return False


def _read_hook(hook_path):
    with open(hook_path, 'rb') as f:
        content = f.read()
    return content