##// END OF EJS Templates
system-info: expose inode limits and usage. Fixes #4282
marcink -
r1027:f516f8a4 default
parent child Browse files
Show More
@@ -1,65 +1,67 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import pytest
22 import pytest
23
23
24 from rhodecode.model.scm import ScmModel
24 from rhodecode.model.scm import ScmModel
25 from rhodecode.api.tests.utils import build_data, api_call, assert_ok
25 from rhodecode.api.tests.utils import build_data, api_call, assert_ok
26
26
27
27
@pytest.fixture
def http_host_stub():
    """Override the default test host with ``localhost:80``.

    Running against a plain "localhost" hostname guarantees that the
    test server can resolve an IP address for the request.
    """
    return 'localhost:80'
35
35
36
36
@pytest.mark.usefixtures("testuser_api", "app")
class TestGetServerInfo(object):
    """API tests for the ``get_server_info`` call."""

    # Keys whose values are runtime-dependent; they are copied from the
    # actual response so the comparison only pins the stable part of the
    # payload. Keeping them in one tuple avoids the copy/paste drift that
    # previously required adding 'disk_inodes' in two places.
    DYNAMIC_KEYS = (
        'memory', 'uptime', 'load', 'cpu', 'disk', 'disk_inodes')

    def _assert_server_info(self, expected):
        """Call ``get_server_info`` over the API and compare against
        *expected*, ignoring runtime-dependent keys."""
        id_, params = build_data(self.apikey, 'get_server_info')
        response = api_call(self.app, params)
        resp = response.json
        for key in self.DYNAMIC_KEYS:
            expected[key] = resp['result'][key]
        expected['server_ip'] = '127.0.0.1:80'
        assert_ok(id_, expected, given=response.body)

    def test_api_get_server_info(self):
        self._assert_server_info(ScmModel().get_server_info())

    def test_api_get_server_info_ip(self):
        # forcing an unknown SERVER_NAME exercises the IP-resolution path
        self._assert_server_info(
            ScmModel().get_server_info({'SERVER_NAME': 'unknown'}))
@@ -1,1102 +1,1120 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import re
26 import re
27 import sys
27 import sys
28 import time
28 import time
29 import traceback
29 import traceback
30 import logging
30 import logging
31 import cStringIO
31 import cStringIO
32 import pkg_resources
32 import pkg_resources
33
33
34 import pylons
34 import pylons
35 from pylons.i18n.translation import _
35 from pylons.i18n.translation import _
36 from sqlalchemy import func
36 from sqlalchemy import func
37 from zope.cachedescriptors.property import Lazy as LazyProperty
37 from zope.cachedescriptors.property import Lazy as LazyProperty
38
38
39 import rhodecode
39 import rhodecode
40 from rhodecode.lib.vcs import get_backend
40 from rhodecode.lib.vcs import get_backend
41 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
41 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
42 from rhodecode.lib.vcs.nodes import FileNode
42 from rhodecode.lib.vcs.nodes import FileNode
43 from rhodecode.lib.vcs.backends.base import EmptyCommit
43 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 from rhodecode.lib import helpers as h
44 from rhodecode.lib import helpers as h
45
45
46 from rhodecode.lib.auth import (
46 from rhodecode.lib.auth import (
47 HasRepoPermissionAny, HasRepoGroupPermissionAny,
47 HasRepoPermissionAny, HasRepoGroupPermissionAny,
48 HasUserGroupPermissionAny)
48 HasUserGroupPermissionAny)
49 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
49 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
50 from rhodecode.lib import hooks_utils, caches
50 from rhodecode.lib import hooks_utils, caches
51 from rhodecode.lib.utils import (
51 from rhodecode.lib.utils import (
52 get_filesystem_repos, action_logger, make_db_config)
52 get_filesystem_repos, action_logger, make_db_config)
53 from rhodecode.lib.utils2 import (
53 from rhodecode.lib.utils2 import (
54 safe_str, safe_unicode, get_server_url, md5)
54 safe_str, safe_unicode, get_server_url, md5)
55 from rhodecode.model import BaseModel
55 from rhodecode.model import BaseModel
56 from rhodecode.model.db import (
56 from rhodecode.model.db import (
57 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
57 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
58 PullRequest, DbMigrateVersion)
58 PullRequest, DbMigrateVersion)
59 from rhodecode.model.settings import VcsSettingsModel
59 from rhodecode.model.settings import VcsSettingsModel
60
60
61 log = logging.getLogger(__name__)
61 log = logging.getLogger(__name__)
62
62
63
63
class UserTemp(object):
    """Lightweight stand-in carrying only a user id, used for action
    logging without loading a full User object."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
70
70
71
71
class RepoTemp(object):
    """Lightweight stand-in carrying only a repo id, used for action
    logging without loading a full Repository object."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
78
78
79
79
class SimpleCachedRepoList(object):
    """
    Iterate repositories using cached database data only, without the
    (expensive) scm backend initialisation.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<{} ({})>'.format(self.__class__.__name__, len(self))

    def __iter__(self):
        for db_repo in self.db_repo_list:
            # permission filtering happens here, per repository
            if not HasRepoPermissionAny(*self.perm_set)(
                    db_repo.repo_name, 'SimpleCachedRepoList check'):
                continue

            yield {
                'name': db_repo.repo_name,
                'dbrepo': db_repo.get_dict(),
                'dbrepo_fork': db_repo.fork.get_dict() if db_repo.fork else {},
            }
115
115
116
116
class _PermCheckIterator(object):
    """Iterator over db objects that yields only those passing a
    permission check built from *perm_set* and *perm_checker*."""

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Create an iterator from a list of objects, filtering each one
        through a permission check.

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        :param extra_kwargs: extra keyword args forwarded to the checker
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<{} ({})>'.format(self.__class__.__name__, len(self))

    def __iter__(self):
        # build the checker once for the whole iteration
        checker = self.perm_checker(*self.perm_set)
        for db_obj in self.obj_list:
            obj_name = getattr(db_obj, self.obj_attr, None)
            # skip anything the permission checker rejects
            if checker(obj_name, self.__class__.__name__,
                       **self.extra_kwargs):
                yield db_obj
152
152
153
153
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over Repository objects."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        perms = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name',
            perm_set=perms,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
166
166
167
167
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over RepoGroup objects."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        perms = perm_set or ['group.read', 'group.write', 'group.admin']

        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name',
            perm_set=perms,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
179
179
180
180
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over UserGroup objects."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        perms = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']

        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name',
            perm_set=perms,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
192
192
193
193
194 class ScmModel(BaseModel):
194 class ScmModel(BaseModel):
195 """
195 """
196 Generic Scm Model
196 Generic Scm Model
197 """
197 """
198
198
199 @LazyProperty
199 @LazyProperty
200 def repos_path(self):
200 def repos_path(self):
201 """
201 """
202 Gets the repositories root path from database
202 Gets the repositories root path from database
203 """
203 """
204
204
205 settings_model = VcsSettingsModel(sa=self.sa)
205 settings_model = VcsSettingsModel(sa=self.sa)
206 return settings_model.get_repos_location()
206 return settings_model.get_repos_location()
207
207
208 def repo_scan(self, repos_path=None):
208 def repo_scan(self, repos_path=None):
209 """
209 """
210 Listing of repositories in given path. This path should not be a
210 Listing of repositories in given path. This path should not be a
211 repository itself. Return a dictionary of repository objects
211 repository itself. Return a dictionary of repository objects
212
212
213 :param repos_path: path to directory containing repositories
213 :param repos_path: path to directory containing repositories
214 """
214 """
215
215
216 if repos_path is None:
216 if repos_path is None:
217 repos_path = self.repos_path
217 repos_path = self.repos_path
218
218
219 log.info('scanning for repositories in %s', repos_path)
219 log.info('scanning for repositories in %s', repos_path)
220
220
221 config = make_db_config()
221 config = make_db_config()
222 config.set('extensions', 'largefiles', '')
222 config.set('extensions', 'largefiles', '')
223 repos = {}
223 repos = {}
224
224
225 for name, path in get_filesystem_repos(repos_path, recursive=True):
225 for name, path in get_filesystem_repos(repos_path, recursive=True):
226 # name need to be decomposed and put back together using the /
226 # name need to be decomposed and put back together using the /
227 # since this is internal storage separator for rhodecode
227 # since this is internal storage separator for rhodecode
228 name = Repository.normalize_repo_name(name)
228 name = Repository.normalize_repo_name(name)
229
229
230 try:
230 try:
231 if name in repos:
231 if name in repos:
232 raise RepositoryError('Duplicate repository name %s '
232 raise RepositoryError('Duplicate repository name %s '
233 'found in %s' % (name, path))
233 'found in %s' % (name, path))
234 elif path[0] in rhodecode.BACKENDS:
234 elif path[0] in rhodecode.BACKENDS:
235 klass = get_backend(path[0])
235 klass = get_backend(path[0])
236 repos[name] = klass(path[1], config=config)
236 repos[name] = klass(path[1], config=config)
237 except OSError:
237 except OSError:
238 continue
238 continue
239 log.debug('found %s paths with repositories', len(repos))
239 log.debug('found %s paths with repositories', len(repos))
240 return repos
240 return repos
241
241
242 def get_repos(self, all_repos=None, sort_key=None):
242 def get_repos(self, all_repos=None, sort_key=None):
243 """
243 """
244 Get all repositories from db and for each repo create it's
244 Get all repositories from db and for each repo create it's
245 backend instance and fill that backed with information from database
245 backend instance and fill that backed with information from database
246
246
247 :param all_repos: list of repository names as strings
247 :param all_repos: list of repository names as strings
248 give specific repositories list, good for filtering
248 give specific repositories list, good for filtering
249
249
250 :param sort_key: initial sorting of repositories
250 :param sort_key: initial sorting of repositories
251 """
251 """
252 if all_repos is None:
252 if all_repos is None:
253 all_repos = self.sa.query(Repository)\
253 all_repos = self.sa.query(Repository)\
254 .filter(Repository.group_id == None)\
254 .filter(Repository.group_id == None)\
255 .order_by(func.lower(Repository.repo_name)).all()
255 .order_by(func.lower(Repository.repo_name)).all()
256 repo_iter = SimpleCachedRepoList(
256 repo_iter = SimpleCachedRepoList(
257 all_repos, repos_path=self.repos_path, order_by=sort_key)
257 all_repos, repos_path=self.repos_path, order_by=sort_key)
258 return repo_iter
258 return repo_iter
259
259
260 def get_repo_groups(self, all_groups=None):
260 def get_repo_groups(self, all_groups=None):
261 if all_groups is None:
261 if all_groups is None:
262 all_groups = RepoGroup.query()\
262 all_groups = RepoGroup.query()\
263 .filter(RepoGroup.group_parent_id == None).all()
263 .filter(RepoGroup.group_parent_id == None).all()
264 return [x for x in RepoGroupList(all_groups)]
264 return [x for x in RepoGroupList(all_groups)]
265
265
266 def mark_for_invalidation(self, repo_name, delete=False):
266 def mark_for_invalidation(self, repo_name, delete=False):
267 """
267 """
268 Mark caches of this repo invalid in the database. `delete` flag
268 Mark caches of this repo invalid in the database. `delete` flag
269 removes the cache entries
269 removes the cache entries
270
270
271 :param repo_name: the repo_name for which caches should be marked
271 :param repo_name: the repo_name for which caches should be marked
272 invalid, or deleted
272 invalid, or deleted
273 :param delete: delete the entry keys instead of setting bool
273 :param delete: delete the entry keys instead of setting bool
274 flag on them
274 flag on them
275 """
275 """
276 CacheKey.set_invalidate(repo_name, delete=delete)
276 CacheKey.set_invalidate(repo_name, delete=delete)
277 repo = Repository.get_by_repo_name(repo_name)
277 repo = Repository.get_by_repo_name(repo_name)
278
278
279 if repo:
279 if repo:
280 config = repo._config
280 config = repo._config
281 config.set('extensions', 'largefiles', '')
281 config.set('extensions', 'largefiles', '')
282 repo.update_commit_cache(config=config, cs_cache=None)
282 repo.update_commit_cache(config=config, cs_cache=None)
283 caches.clear_repo_caches(repo_name)
283 caches.clear_repo_caches(repo_name)
284
284
285 def toggle_following_repo(self, follow_repo_id, user_id):
285 def toggle_following_repo(self, follow_repo_id, user_id):
286
286
287 f = self.sa.query(UserFollowing)\
287 f = self.sa.query(UserFollowing)\
288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
289 .filter(UserFollowing.user_id == user_id).scalar()
289 .filter(UserFollowing.user_id == user_id).scalar()
290
290
291 if f is not None:
291 if f is not None:
292 try:
292 try:
293 self.sa.delete(f)
293 self.sa.delete(f)
294 action_logger(UserTemp(user_id),
294 action_logger(UserTemp(user_id),
295 'stopped_following_repo',
295 'stopped_following_repo',
296 RepoTemp(follow_repo_id))
296 RepoTemp(follow_repo_id))
297 return
297 return
298 except Exception:
298 except Exception:
299 log.error(traceback.format_exc())
299 log.error(traceback.format_exc())
300 raise
300 raise
301
301
302 try:
302 try:
303 f = UserFollowing()
303 f = UserFollowing()
304 f.user_id = user_id
304 f.user_id = user_id
305 f.follows_repo_id = follow_repo_id
305 f.follows_repo_id = follow_repo_id
306 self.sa.add(f)
306 self.sa.add(f)
307
307
308 action_logger(UserTemp(user_id),
308 action_logger(UserTemp(user_id),
309 'started_following_repo',
309 'started_following_repo',
310 RepoTemp(follow_repo_id))
310 RepoTemp(follow_repo_id))
311 except Exception:
311 except Exception:
312 log.error(traceback.format_exc())
312 log.error(traceback.format_exc())
313 raise
313 raise
314
314
315 def toggle_following_user(self, follow_user_id, user_id):
315 def toggle_following_user(self, follow_user_id, user_id):
316 f = self.sa.query(UserFollowing)\
316 f = self.sa.query(UserFollowing)\
317 .filter(UserFollowing.follows_user_id == follow_user_id)\
317 .filter(UserFollowing.follows_user_id == follow_user_id)\
318 .filter(UserFollowing.user_id == user_id).scalar()
318 .filter(UserFollowing.user_id == user_id).scalar()
319
319
320 if f is not None:
320 if f is not None:
321 try:
321 try:
322 self.sa.delete(f)
322 self.sa.delete(f)
323 return
323 return
324 except Exception:
324 except Exception:
325 log.error(traceback.format_exc())
325 log.error(traceback.format_exc())
326 raise
326 raise
327
327
328 try:
328 try:
329 f = UserFollowing()
329 f = UserFollowing()
330 f.user_id = user_id
330 f.user_id = user_id
331 f.follows_user_id = follow_user_id
331 f.follows_user_id = follow_user_id
332 self.sa.add(f)
332 self.sa.add(f)
333 except Exception:
333 except Exception:
334 log.error(traceback.format_exc())
334 log.error(traceback.format_exc())
335 raise
335 raise
336
336
337 def is_following_repo(self, repo_name, user_id, cache=False):
337 def is_following_repo(self, repo_name, user_id, cache=False):
338 r = self.sa.query(Repository)\
338 r = self.sa.query(Repository)\
339 .filter(Repository.repo_name == repo_name).scalar()
339 .filter(Repository.repo_name == repo_name).scalar()
340
340
341 f = self.sa.query(UserFollowing)\
341 f = self.sa.query(UserFollowing)\
342 .filter(UserFollowing.follows_repository == r)\
342 .filter(UserFollowing.follows_repository == r)\
343 .filter(UserFollowing.user_id == user_id).scalar()
343 .filter(UserFollowing.user_id == user_id).scalar()
344
344
345 return f is not None
345 return f is not None
346
346
347 def is_following_user(self, username, user_id, cache=False):
347 def is_following_user(self, username, user_id, cache=False):
348 u = User.get_by_username(username)
348 u = User.get_by_username(username)
349
349
350 f = self.sa.query(UserFollowing)\
350 f = self.sa.query(UserFollowing)\
351 .filter(UserFollowing.follows_user == u)\
351 .filter(UserFollowing.follows_user == u)\
352 .filter(UserFollowing.user_id == user_id).scalar()
352 .filter(UserFollowing.user_id == user_id).scalar()
353
353
354 return f is not None
354 return f is not None
355
355
356 def get_followers(self, repo):
356 def get_followers(self, repo):
357 repo = self._get_repo(repo)
357 repo = self._get_repo(repo)
358
358
359 return self.sa.query(UserFollowing)\
359 return self.sa.query(UserFollowing)\
360 .filter(UserFollowing.follows_repository == repo).count()
360 .filter(UserFollowing.follows_repository == repo).count()
361
361
362 def get_forks(self, repo):
362 def get_forks(self, repo):
363 repo = self._get_repo(repo)
363 repo = self._get_repo(repo)
364 return self.sa.query(Repository)\
364 return self.sa.query(Repository)\
365 .filter(Repository.fork == repo).count()
365 .filter(Repository.fork == repo).count()
366
366
367 def get_pull_requests(self, repo):
367 def get_pull_requests(self, repo):
368 repo = self._get_repo(repo)
368 repo = self._get_repo(repo)
369 return self.sa.query(PullRequest)\
369 return self.sa.query(PullRequest)\
370 .filter(PullRequest.target_repo == repo)\
370 .filter(PullRequest.target_repo == repo)\
371 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
371 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
372
372
373 def mark_as_fork(self, repo, fork, user):
373 def mark_as_fork(self, repo, fork, user):
374 repo = self._get_repo(repo)
374 repo = self._get_repo(repo)
375 fork = self._get_repo(fork)
375 fork = self._get_repo(fork)
376 if fork and repo.repo_id == fork.repo_id:
376 if fork and repo.repo_id == fork.repo_id:
377 raise Exception("Cannot set repository as fork of itself")
377 raise Exception("Cannot set repository as fork of itself")
378
378
379 if fork and repo.repo_type != fork.repo_type:
379 if fork and repo.repo_type != fork.repo_type:
380 raise RepositoryError(
380 raise RepositoryError(
381 "Cannot set repository as fork of repository with other type")
381 "Cannot set repository as fork of repository with other type")
382
382
383 repo.fork = fork
383 repo.fork = fork
384 self.sa.add(repo)
384 self.sa.add(repo)
385 return repo
385 return repo
386
386
387 def pull_changes(self, repo, username):
387 def pull_changes(self, repo, username):
388 dbrepo = self._get_repo(repo)
388 dbrepo = self._get_repo(repo)
389 clone_uri = dbrepo.clone_uri
389 clone_uri = dbrepo.clone_uri
390 if not clone_uri:
390 if not clone_uri:
391 raise Exception("This repository doesn't have a clone uri")
391 raise Exception("This repository doesn't have a clone uri")
392
392
393 repo = dbrepo.scm_instance(cache=False)
393 repo = dbrepo.scm_instance(cache=False)
394 # TODO: marcink fix this an re-enable since we need common logic
394 # TODO: marcink fix this an re-enable since we need common logic
395 # for hg/git remove hooks so we don't trigger them on fetching
395 # for hg/git remove hooks so we don't trigger them on fetching
396 # commits from remote
396 # commits from remote
397 repo.config.clear_section('hooks')
397 repo.config.clear_section('hooks')
398
398
399 repo_name = dbrepo.repo_name
399 repo_name = dbrepo.repo_name
400 try:
400 try:
401 # TODO: we need to make sure those operations call proper hooks !
401 # TODO: we need to make sure those operations call proper hooks !
402 repo.pull(clone_uri)
402 repo.pull(clone_uri)
403
403
404 self.mark_for_invalidation(repo_name)
404 self.mark_for_invalidation(repo_name)
405 except Exception:
405 except Exception:
406 log.error(traceback.format_exc())
406 log.error(traceback.format_exc())
407 raise
407 raise
408
408
409 def commit_change(self, repo, repo_name, commit, user, author, message,
409 def commit_change(self, repo, repo_name, commit, user, author, message,
410 content, f_path):
410 content, f_path):
411 """
411 """
412 Commits changes
412 Commits changes
413
413
414 :param repo: SCM instance
414 :param repo: SCM instance
415
415
416 """
416 """
417 user = self._get_user(user)
417 user = self._get_user(user)
418
418
419 # decoding here will force that we have proper encoded values
419 # decoding here will force that we have proper encoded values
420 # in any other case this will throw exceptions and deny commit
420 # in any other case this will throw exceptions and deny commit
421 content = safe_str(content)
421 content = safe_str(content)
422 path = safe_str(f_path)
422 path = safe_str(f_path)
423 # message and author needs to be unicode
423 # message and author needs to be unicode
424 # proper backend should then translate that into required type
424 # proper backend should then translate that into required type
425 message = safe_unicode(message)
425 message = safe_unicode(message)
426 author = safe_unicode(author)
426 author = safe_unicode(author)
427 imc = repo.in_memory_commit
427 imc = repo.in_memory_commit
428 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
428 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
429 try:
429 try:
430 # TODO: handle pre-push action !
430 # TODO: handle pre-push action !
431 tip = imc.commit(
431 tip = imc.commit(
432 message=message, author=author, parents=[commit],
432 message=message, author=author, parents=[commit],
433 branch=commit.branch)
433 branch=commit.branch)
434 except Exception as e:
434 except Exception as e:
435 log.error(traceback.format_exc())
435 log.error(traceback.format_exc())
436 raise IMCCommitError(str(e))
436 raise IMCCommitError(str(e))
437 finally:
437 finally:
438 # always clear caches, if commit fails we want fresh object also
438 # always clear caches, if commit fails we want fresh object also
439 self.mark_for_invalidation(repo_name)
439 self.mark_for_invalidation(repo_name)
440
440
441 # We trigger the post-push action
441 # We trigger the post-push action
442 hooks_utils.trigger_post_push_hook(
442 hooks_utils.trigger_post_push_hook(
443 username=user.username, action='push_local', repo_name=repo_name,
443 username=user.username, action='push_local', repo_name=repo_name,
444 repo_alias=repo.alias, commit_ids=[tip.raw_id])
444 repo_alias=repo.alias, commit_ids=[tip.raw_id])
445 return tip
445 return tip
446
446
447 def _sanitize_path(self, f_path):
447 def _sanitize_path(self, f_path):
448 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
448 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
449 raise NonRelativePathError('%s is not an relative path' % f_path)
449 raise NonRelativePathError('%s is not an relative path' % f_path)
450 if f_path:
450 if f_path:
451 f_path = os.path.normpath(f_path)
451 f_path = os.path.normpath(f_path)
452 return f_path
452 return f_path
453
453
454 def get_dirnode_metadata(self, commit, dir_node):
454 def get_dirnode_metadata(self, commit, dir_node):
455 if not dir_node.is_dir():
455 if not dir_node.is_dir():
456 return []
456 return []
457
457
458 data = []
458 data = []
459 for node in dir_node:
459 for node in dir_node:
460 if not node.is_file():
460 if not node.is_file():
461 # we skip file-nodes
461 # we skip file-nodes
462 continue
462 continue
463
463
464 last_commit = node.last_commit
464 last_commit = node.last_commit
465 last_commit_date = last_commit.date
465 last_commit_date = last_commit.date
466 data.append({
466 data.append({
467 'name': node.name,
467 'name': node.name,
468 'size': h.format_byte_size_binary(node.size),
468 'size': h.format_byte_size_binary(node.size),
469 'modified_at': h.format_date(last_commit_date),
469 'modified_at': h.format_date(last_commit_date),
470 'modified_ts': last_commit_date.isoformat(),
470 'modified_ts': last_commit_date.isoformat(),
471 'revision': last_commit.revision,
471 'revision': last_commit.revision,
472 'short_id': last_commit.short_id,
472 'short_id': last_commit.short_id,
473 'message': h.escape(last_commit.message),
473 'message': h.escape(last_commit.message),
474 'author': h.escape(last_commit.author),
474 'author': h.escape(last_commit.author),
475 'user_profile': h.gravatar_with_user(last_commit.author),
475 'user_profile': h.gravatar_with_user(last_commit.author),
476 })
476 })
477
477
478 return data
478 return data
479
479
480 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
480 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
481 extended_info=False, content=False, max_file_bytes=None):
481 extended_info=False, content=False, max_file_bytes=None):
482 """
482 """
483 recursive walk in root dir and return a set of all path in that dir
483 recursive walk in root dir and return a set of all path in that dir
484 based on repository walk function
484 based on repository walk function
485
485
486 :param repo_name: name of repository
486 :param repo_name: name of repository
487 :param commit_id: commit id for which to list nodes
487 :param commit_id: commit id for which to list nodes
488 :param root_path: root path to list
488 :param root_path: root path to list
489 :param flat: return as a list, if False returns a dict with description
489 :param flat: return as a list, if False returns a dict with description
490 :param max_file_bytes: will not return file contents over this limit
490 :param max_file_bytes: will not return file contents over this limit
491
491
492 """
492 """
493 _files = list()
493 _files = list()
494 _dirs = list()
494 _dirs = list()
495 try:
495 try:
496 _repo = self._get_repo(repo_name)
496 _repo = self._get_repo(repo_name)
497 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
497 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
498 root_path = root_path.lstrip('/')
498 root_path = root_path.lstrip('/')
499 for __, dirs, files in commit.walk(root_path):
499 for __, dirs, files in commit.walk(root_path):
500 for f in files:
500 for f in files:
501 _content = None
501 _content = None
502 _data = f.unicode_path
502 _data = f.unicode_path
503 over_size_limit = (max_file_bytes is not None
503 over_size_limit = (max_file_bytes is not None
504 and f.size > max_file_bytes)
504 and f.size > max_file_bytes)
505
505
506 if not flat:
506 if not flat:
507 _data = {
507 _data = {
508 "name": f.unicode_path,
508 "name": f.unicode_path,
509 "type": "file",
509 "type": "file",
510 }
510 }
511 if extended_info:
511 if extended_info:
512 _data.update({
512 _data.update({
513 "md5": f.md5,
513 "md5": f.md5,
514 "binary": f.is_binary,
514 "binary": f.is_binary,
515 "size": f.size,
515 "size": f.size,
516 "extension": f.extension,
516 "extension": f.extension,
517 "mimetype": f.mimetype,
517 "mimetype": f.mimetype,
518 "lines": f.lines()[0]
518 "lines": f.lines()[0]
519 })
519 })
520
520
521 if content:
521 if content:
522 full_content = None
522 full_content = None
523 if not f.is_binary and not over_size_limit:
523 if not f.is_binary and not over_size_limit:
524 full_content = safe_str(f.content)
524 full_content = safe_str(f.content)
525
525
526 _data.update({
526 _data.update({
527 "content": full_content,
527 "content": full_content,
528 })
528 })
529 _files.append(_data)
529 _files.append(_data)
530 for d in dirs:
530 for d in dirs:
531 _data = d.unicode_path
531 _data = d.unicode_path
532 if not flat:
532 if not flat:
533 _data = {
533 _data = {
534 "name": d.unicode_path,
534 "name": d.unicode_path,
535 "type": "dir",
535 "type": "dir",
536 }
536 }
537 if extended_info:
537 if extended_info:
538 _data.update({
538 _data.update({
539 "md5": None,
539 "md5": None,
540 "binary": None,
540 "binary": None,
541 "size": None,
541 "size": None,
542 "extension": None,
542 "extension": None,
543 })
543 })
544 if content:
544 if content:
545 _data.update({
545 _data.update({
546 "content": None
546 "content": None
547 })
547 })
548 _dirs.append(_data)
548 _dirs.append(_data)
549 except RepositoryError:
549 except RepositoryError:
550 log.debug("Exception in get_nodes", exc_info=True)
550 log.debug("Exception in get_nodes", exc_info=True)
551 raise
551 raise
552
552
553 return _dirs, _files
553 return _dirs, _files
554
554
555 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
555 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
556 author=None, trigger_push_hook=True):
556 author=None, trigger_push_hook=True):
557 """
557 """
558 Commits given multiple nodes into repo
558 Commits given multiple nodes into repo
559
559
560 :param user: RhodeCode User object or user_id, the commiter
560 :param user: RhodeCode User object or user_id, the commiter
561 :param repo: RhodeCode Repository object
561 :param repo: RhodeCode Repository object
562 :param message: commit message
562 :param message: commit message
563 :param nodes: mapping {filename:{'content':content},...}
563 :param nodes: mapping {filename:{'content':content},...}
564 :param parent_commit: parent commit, can be empty than it's
564 :param parent_commit: parent commit, can be empty than it's
565 initial commit
565 initial commit
566 :param author: author of commit, cna be different that commiter
566 :param author: author of commit, cna be different that commiter
567 only for git
567 only for git
568 :param trigger_push_hook: trigger push hooks
568 :param trigger_push_hook: trigger push hooks
569
569
570 :returns: new commited commit
570 :returns: new commited commit
571 """
571 """
572
572
573 user = self._get_user(user)
573 user = self._get_user(user)
574 scm_instance = repo.scm_instance(cache=False)
574 scm_instance = repo.scm_instance(cache=False)
575
575
576 processed_nodes = []
576 processed_nodes = []
577 for f_path in nodes:
577 for f_path in nodes:
578 f_path = self._sanitize_path(f_path)
578 f_path = self._sanitize_path(f_path)
579 content = nodes[f_path]['content']
579 content = nodes[f_path]['content']
580 f_path = safe_str(f_path)
580 f_path = safe_str(f_path)
581 # decoding here will force that we have proper encoded values
581 # decoding here will force that we have proper encoded values
582 # in any other case this will throw exceptions and deny commit
582 # in any other case this will throw exceptions and deny commit
583 if isinstance(content, (basestring,)):
583 if isinstance(content, (basestring,)):
584 content = safe_str(content)
584 content = safe_str(content)
585 elif isinstance(content, (file, cStringIO.OutputType,)):
585 elif isinstance(content, (file, cStringIO.OutputType,)):
586 content = content.read()
586 content = content.read()
587 else:
587 else:
588 raise Exception('Content is of unrecognized type %s' % (
588 raise Exception('Content is of unrecognized type %s' % (
589 type(content)
589 type(content)
590 ))
590 ))
591 processed_nodes.append((f_path, content))
591 processed_nodes.append((f_path, content))
592
592
593 message = safe_unicode(message)
593 message = safe_unicode(message)
594 commiter = user.full_contact
594 commiter = user.full_contact
595 author = safe_unicode(author) if author else commiter
595 author = safe_unicode(author) if author else commiter
596
596
597 imc = scm_instance.in_memory_commit
597 imc = scm_instance.in_memory_commit
598
598
599 if not parent_commit:
599 if not parent_commit:
600 parent_commit = EmptyCommit(alias=scm_instance.alias)
600 parent_commit = EmptyCommit(alias=scm_instance.alias)
601
601
602 if isinstance(parent_commit, EmptyCommit):
602 if isinstance(parent_commit, EmptyCommit):
603 # EmptyCommit means we we're editing empty repository
603 # EmptyCommit means we we're editing empty repository
604 parents = None
604 parents = None
605 else:
605 else:
606 parents = [parent_commit]
606 parents = [parent_commit]
607 # add multiple nodes
607 # add multiple nodes
608 for path, content in processed_nodes:
608 for path, content in processed_nodes:
609 imc.add(FileNode(path, content=content))
609 imc.add(FileNode(path, content=content))
610 # TODO: handle pre push scenario
610 # TODO: handle pre push scenario
611 tip = imc.commit(message=message,
611 tip = imc.commit(message=message,
612 author=author,
612 author=author,
613 parents=parents,
613 parents=parents,
614 branch=parent_commit.branch)
614 branch=parent_commit.branch)
615
615
616 self.mark_for_invalidation(repo.repo_name)
616 self.mark_for_invalidation(repo.repo_name)
617 if trigger_push_hook:
617 if trigger_push_hook:
618 hooks_utils.trigger_post_push_hook(
618 hooks_utils.trigger_post_push_hook(
619 username=user.username, action='push_local',
619 username=user.username, action='push_local',
620 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
620 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
621 commit_ids=[tip.raw_id])
621 commit_ids=[tip.raw_id])
622 return tip
622 return tip
623
623
624 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
624 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
625 author=None, trigger_push_hook=True):
625 author=None, trigger_push_hook=True):
626 user = self._get_user(user)
626 user = self._get_user(user)
627 scm_instance = repo.scm_instance(cache=False)
627 scm_instance = repo.scm_instance(cache=False)
628
628
629 message = safe_unicode(message)
629 message = safe_unicode(message)
630 commiter = user.full_contact
630 commiter = user.full_contact
631 author = safe_unicode(author) if author else commiter
631 author = safe_unicode(author) if author else commiter
632
632
633 imc = scm_instance.in_memory_commit
633 imc = scm_instance.in_memory_commit
634
634
635 if not parent_commit:
635 if not parent_commit:
636 parent_commit = EmptyCommit(alias=scm_instance.alias)
636 parent_commit = EmptyCommit(alias=scm_instance.alias)
637
637
638 if isinstance(parent_commit, EmptyCommit):
638 if isinstance(parent_commit, EmptyCommit):
639 # EmptyCommit means we we're editing empty repository
639 # EmptyCommit means we we're editing empty repository
640 parents = None
640 parents = None
641 else:
641 else:
642 parents = [parent_commit]
642 parents = [parent_commit]
643
643
644 # add multiple nodes
644 # add multiple nodes
645 for _filename, data in nodes.items():
645 for _filename, data in nodes.items():
646 # new filename, can be renamed from the old one, also sanitaze
646 # new filename, can be renamed from the old one, also sanitaze
647 # the path for any hack around relative paths like ../../ etc.
647 # the path for any hack around relative paths like ../../ etc.
648 filename = self._sanitize_path(data['filename'])
648 filename = self._sanitize_path(data['filename'])
649 old_filename = self._sanitize_path(_filename)
649 old_filename = self._sanitize_path(_filename)
650 content = data['content']
650 content = data['content']
651
651
652 filenode = FileNode(old_filename, content=content)
652 filenode = FileNode(old_filename, content=content)
653 op = data['op']
653 op = data['op']
654 if op == 'add':
654 if op == 'add':
655 imc.add(filenode)
655 imc.add(filenode)
656 elif op == 'del':
656 elif op == 'del':
657 imc.remove(filenode)
657 imc.remove(filenode)
658 elif op == 'mod':
658 elif op == 'mod':
659 if filename != old_filename:
659 if filename != old_filename:
660 # TODO: handle renames more efficient, needs vcs lib
660 # TODO: handle renames more efficient, needs vcs lib
661 # changes
661 # changes
662 imc.remove(filenode)
662 imc.remove(filenode)
663 imc.add(FileNode(filename, content=content))
663 imc.add(FileNode(filename, content=content))
664 else:
664 else:
665 imc.change(filenode)
665 imc.change(filenode)
666
666
667 try:
667 try:
668 # TODO: handle pre push scenario
668 # TODO: handle pre push scenario
669 # commit changes
669 # commit changes
670 tip = imc.commit(message=message,
670 tip = imc.commit(message=message,
671 author=author,
671 author=author,
672 parents=parents,
672 parents=parents,
673 branch=parent_commit.branch)
673 branch=parent_commit.branch)
674 except NodeNotChangedError:
674 except NodeNotChangedError:
675 raise
675 raise
676 except Exception as e:
676 except Exception as e:
677 log.exception("Unexpected exception during call to imc.commit")
677 log.exception("Unexpected exception during call to imc.commit")
678 raise IMCCommitError(str(e))
678 raise IMCCommitError(str(e))
679 finally:
679 finally:
680 # always clear caches, if commit fails we want fresh object also
680 # always clear caches, if commit fails we want fresh object also
681 self.mark_for_invalidation(repo.repo_name)
681 self.mark_for_invalidation(repo.repo_name)
682
682
683 if trigger_push_hook:
683 if trigger_push_hook:
684 hooks_utils.trigger_post_push_hook(
684 hooks_utils.trigger_post_push_hook(
685 username=user.username, action='push_local',
685 username=user.username, action='push_local',
686 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
686 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
687 commit_ids=[tip.raw_id])
687 commit_ids=[tip.raw_id])
688
688
689 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
689 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
690 author=None, trigger_push_hook=True):
690 author=None, trigger_push_hook=True):
691 """
691 """
692 Deletes given multiple nodes into `repo`
692 Deletes given multiple nodes into `repo`
693
693
694 :param user: RhodeCode User object or user_id, the committer
694 :param user: RhodeCode User object or user_id, the committer
695 :param repo: RhodeCode Repository object
695 :param repo: RhodeCode Repository object
696 :param message: commit message
696 :param message: commit message
697 :param nodes: mapping {filename:{'content':content},...}
697 :param nodes: mapping {filename:{'content':content},...}
698 :param parent_commit: parent commit, can be empty than it's initial
698 :param parent_commit: parent commit, can be empty than it's initial
699 commit
699 commit
700 :param author: author of commit, cna be different that commiter only
700 :param author: author of commit, cna be different that commiter only
701 for git
701 for git
702 :param trigger_push_hook: trigger push hooks
702 :param trigger_push_hook: trigger push hooks
703
703
704 :returns: new commit after deletion
704 :returns: new commit after deletion
705 """
705 """
706
706
707 user = self._get_user(user)
707 user = self._get_user(user)
708 scm_instance = repo.scm_instance(cache=False)
708 scm_instance = repo.scm_instance(cache=False)
709
709
710 processed_nodes = []
710 processed_nodes = []
711 for f_path in nodes:
711 for f_path in nodes:
712 f_path = self._sanitize_path(f_path)
712 f_path = self._sanitize_path(f_path)
713 # content can be empty but for compatabilty it allows same dicts
713 # content can be empty but for compatabilty it allows same dicts
714 # structure as add_nodes
714 # structure as add_nodes
715 content = nodes[f_path].get('content')
715 content = nodes[f_path].get('content')
716 processed_nodes.append((f_path, content))
716 processed_nodes.append((f_path, content))
717
717
718 message = safe_unicode(message)
718 message = safe_unicode(message)
719 commiter = user.full_contact
719 commiter = user.full_contact
720 author = safe_unicode(author) if author else commiter
720 author = safe_unicode(author) if author else commiter
721
721
722 imc = scm_instance.in_memory_commit
722 imc = scm_instance.in_memory_commit
723
723
724 if not parent_commit:
724 if not parent_commit:
725 parent_commit = EmptyCommit(alias=scm_instance.alias)
725 parent_commit = EmptyCommit(alias=scm_instance.alias)
726
726
727 if isinstance(parent_commit, EmptyCommit):
727 if isinstance(parent_commit, EmptyCommit):
728 # EmptyCommit means we we're editing empty repository
728 # EmptyCommit means we we're editing empty repository
729 parents = None
729 parents = None
730 else:
730 else:
731 parents = [parent_commit]
731 parents = [parent_commit]
732 # add multiple nodes
732 # add multiple nodes
733 for path, content in processed_nodes:
733 for path, content in processed_nodes:
734 imc.remove(FileNode(path, content=content))
734 imc.remove(FileNode(path, content=content))
735
735
736 # TODO: handle pre push scenario
736 # TODO: handle pre push scenario
737 tip = imc.commit(message=message,
737 tip = imc.commit(message=message,
738 author=author,
738 author=author,
739 parents=parents,
739 parents=parents,
740 branch=parent_commit.branch)
740 branch=parent_commit.branch)
741
741
742 self.mark_for_invalidation(repo.repo_name)
742 self.mark_for_invalidation(repo.repo_name)
743 if trigger_push_hook:
743 if trigger_push_hook:
744 hooks_utils.trigger_post_push_hook(
744 hooks_utils.trigger_post_push_hook(
745 username=user.username, action='push_local',
745 username=user.username, action='push_local',
746 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
746 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
747 commit_ids=[tip.raw_id])
747 commit_ids=[tip.raw_id])
748 return tip
748 return tip
749
749
750 def strip(self, repo, commit_id, branch):
750 def strip(self, repo, commit_id, branch):
751 scm_instance = repo.scm_instance(cache=False)
751 scm_instance = repo.scm_instance(cache=False)
752 scm_instance.config.clear_section('hooks')
752 scm_instance.config.clear_section('hooks')
753 scm_instance.strip(commit_id, branch)
753 scm_instance.strip(commit_id, branch)
754 self.mark_for_invalidation(repo.repo_name)
754 self.mark_for_invalidation(repo.repo_name)
755
755
756 def get_unread_journal(self):
756 def get_unread_journal(self):
757 return self.sa.query(UserLog).count()
757 return self.sa.query(UserLog).count()
758
758
759 def get_repo_landing_revs(self, repo=None):
759 def get_repo_landing_revs(self, repo=None):
760 """
760 """
761 Generates select option with tags branches and bookmarks (for hg only)
761 Generates select option with tags branches and bookmarks (for hg only)
762 grouped by type
762 grouped by type
763
763
764 :param repo:
764 :param repo:
765 """
765 """
766
766
767 hist_l = []
767 hist_l = []
768 choices = []
768 choices = []
769 repo = self._get_repo(repo)
769 repo = self._get_repo(repo)
770 hist_l.append(['rev:tip', _('latest tip')])
770 hist_l.append(['rev:tip', _('latest tip')])
771 choices.append('rev:tip')
771 choices.append('rev:tip')
772 if not repo:
772 if not repo:
773 return choices, hist_l
773 return choices, hist_l
774
774
775 repo = repo.scm_instance()
775 repo = repo.scm_instance()
776
776
777 branches_group = (
777 branches_group = (
778 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
778 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
779 for b in repo.branches],
779 for b in repo.branches],
780 _("Branches"))
780 _("Branches"))
781 hist_l.append(branches_group)
781 hist_l.append(branches_group)
782 choices.extend([x[0] for x in branches_group[0]])
782 choices.extend([x[0] for x in branches_group[0]])
783
783
784 if repo.alias == 'hg':
784 if repo.alias == 'hg':
785 bookmarks_group = (
785 bookmarks_group = (
786 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
786 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
787 for b in repo.bookmarks],
787 for b in repo.bookmarks],
788 _("Bookmarks"))
788 _("Bookmarks"))
789 hist_l.append(bookmarks_group)
789 hist_l.append(bookmarks_group)
790 choices.extend([x[0] for x in bookmarks_group[0]])
790 choices.extend([x[0] for x in bookmarks_group[0]])
791
791
792 tags_group = (
792 tags_group = (
793 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
793 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
794 for t in repo.tags],
794 for t in repo.tags],
795 _("Tags"))
795 _("Tags"))
796 hist_l.append(tags_group)
796 hist_l.append(tags_group)
797 choices.extend([x[0] for x in tags_group[0]])
797 choices.extend([x[0] for x in tags_group[0]])
798
798
799 return choices, hist_l
799 return choices, hist_l
800
800
801 def install_git_hook(self, repo, force_create=False):
801 def install_git_hook(self, repo, force_create=False):
802 """
802 """
803 Creates a rhodecode hook inside a git repository
803 Creates a rhodecode hook inside a git repository
804
804
805 :param repo: Instance of VCS repo
805 :param repo: Instance of VCS repo
806 :param force_create: Create even if same name hook exists
806 :param force_create: Create even if same name hook exists
807 """
807 """
808
808
809 loc = os.path.join(repo.path, 'hooks')
809 loc = os.path.join(repo.path, 'hooks')
810 if not repo.bare:
810 if not repo.bare:
811 loc = os.path.join(repo.path, '.git', 'hooks')
811 loc = os.path.join(repo.path, '.git', 'hooks')
812 if not os.path.isdir(loc):
812 if not os.path.isdir(loc):
813 os.makedirs(loc, mode=0777)
813 os.makedirs(loc, mode=0777)
814
814
815 tmpl_post = pkg_resources.resource_string(
815 tmpl_post = pkg_resources.resource_string(
816 'rhodecode', '/'.join(
816 'rhodecode', '/'.join(
817 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
817 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
818 tmpl_pre = pkg_resources.resource_string(
818 tmpl_pre = pkg_resources.resource_string(
819 'rhodecode', '/'.join(
819 'rhodecode', '/'.join(
820 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
820 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
821
821
822 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
822 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
823 _hook_file = os.path.join(loc, '%s-receive' % h_type)
823 _hook_file = os.path.join(loc, '%s-receive' % h_type)
824 log.debug('Installing git hook in repo %s', repo)
824 log.debug('Installing git hook in repo %s', repo)
825 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
825 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
826
826
827 if _rhodecode_hook or force_create:
827 if _rhodecode_hook or force_create:
828 log.debug('writing %s hook file !', h_type)
828 log.debug('writing %s hook file !', h_type)
829 try:
829 try:
830 with open(_hook_file, 'wb') as f:
830 with open(_hook_file, 'wb') as f:
831 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
831 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
832 tmpl = tmpl.replace('_ENV_', sys.executable)
832 tmpl = tmpl.replace('_ENV_', sys.executable)
833 f.write(tmpl)
833 f.write(tmpl)
834 os.chmod(_hook_file, 0755)
834 os.chmod(_hook_file, 0755)
835 except IOError:
835 except IOError:
836 log.exception('error writing hook file %s', _hook_file)
836 log.exception('error writing hook file %s', _hook_file)
837 else:
837 else:
838 log.debug('skipping writing hook file')
838 log.debug('skipping writing hook file')
839
839
840 def install_svn_hooks(self, repo, force_create=False):
840 def install_svn_hooks(self, repo, force_create=False):
841 """
841 """
842 Creates rhodecode hooks inside a svn repository
842 Creates rhodecode hooks inside a svn repository
843
843
844 :param repo: Instance of VCS repo
844 :param repo: Instance of VCS repo
845 :param force_create: Create even if same name hook exists
845 :param force_create: Create even if same name hook exists
846 """
846 """
847 hooks_path = os.path.join(repo.path, 'hooks')
847 hooks_path = os.path.join(repo.path, 'hooks')
848 if not os.path.isdir(hooks_path):
848 if not os.path.isdir(hooks_path):
849 os.makedirs(hooks_path)
849 os.makedirs(hooks_path)
850 post_commit_tmpl = pkg_resources.resource_string(
850 post_commit_tmpl = pkg_resources.resource_string(
851 'rhodecode', '/'.join(
851 'rhodecode', '/'.join(
852 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
852 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
853 pre_commit_template = pkg_resources.resource_string(
853 pre_commit_template = pkg_resources.resource_string(
854 'rhodecode', '/'.join(
854 'rhodecode', '/'.join(
855 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
855 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
856 templates = {
856 templates = {
857 'post-commit': post_commit_tmpl,
857 'post-commit': post_commit_tmpl,
858 'pre-commit': pre_commit_template
858 'pre-commit': pre_commit_template
859 }
859 }
860 for filename in templates:
860 for filename in templates:
861 _hook_file = os.path.join(hooks_path, filename)
861 _hook_file = os.path.join(hooks_path, filename)
862 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
862 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
863 if _rhodecode_hook or force_create:
863 if _rhodecode_hook or force_create:
864 log.debug('writing %s hook file !', filename)
864 log.debug('writing %s hook file !', filename)
865 template = templates[filename]
865 template = templates[filename]
866 try:
866 try:
867 with open(_hook_file, 'wb') as f:
867 with open(_hook_file, 'wb') as f:
868 template = template.replace(
868 template = template.replace(
869 '_TMPL_', rhodecode.__version__)
869 '_TMPL_', rhodecode.__version__)
870 template = template.replace('_ENV_', sys.executable)
870 template = template.replace('_ENV_', sys.executable)
871 f.write(template)
871 f.write(template)
872 os.chmod(_hook_file, 0755)
872 os.chmod(_hook_file, 0755)
873 except IOError:
873 except IOError:
874 log.exception('error writing hook file %s', filename)
874 log.exception('error writing hook file %s', filename)
875 else:
875 else:
876 log.debug('skipping writing hook file')
876 log.debug('skipping writing hook file')
877
877
878 def install_hooks(self, repo, repo_type):
878 def install_hooks(self, repo, repo_type):
879 if repo_type == 'git':
879 if repo_type == 'git':
880 self.install_git_hook(repo)
880 self.install_git_hook(repo)
881 elif repo_type == 'svn':
881 elif repo_type == 'svn':
882 self.install_svn_hooks(repo)
882 self.install_svn_hooks(repo)
883
883
884 def get_server_info(self, environ=None):
884 def get_server_info(self, environ=None):
885 import platform
885 import platform
886 import rhodecode
886 import rhodecode
887 import pkg_resources
887 import pkg_resources
888 from rhodecode.model.meta import Base as sql_base, Session
888 from rhodecode.model.meta import Base as sql_base, Session
889 from sqlalchemy.engine import url
889 from sqlalchemy.engine import url
890 from rhodecode.lib.base import get_server_ip_addr, get_server_port
890 from rhodecode.lib.base import get_server_ip_addr, get_server_port
891 from rhodecode.lib.vcs.backends.git import discover_git_version
891 from rhodecode.lib.vcs.backends.git import discover_git_version
892 from rhodecode.model.gist import GIST_STORE_LOC
892 from rhodecode.model.gist import GIST_STORE_LOC
893
893
894 def percentage(part, whole):
895 return 100 * float(part) / float(whole)
896
894 try:
897 try:
895 # cygwin cannot have yet psutil support.
898 # cygwin cannot have yet psutil support.
896 import psutil
899 import psutil
897 except ImportError:
900 except ImportError:
898 psutil = None
901 psutil = None
899
902
900 environ = environ or {}
903 environ = environ or {}
901 _NA = 'NOT AVAILABLE'
904 _NA = 'NOT AVAILABLE'
902 _memory = _NA
905 _memory = _NA
903 _uptime = _NA
906 _uptime = _NA
904 _boot_time = _NA
907 _boot_time = _NA
905 _cpu = _NA
908 _cpu = _NA
906 _disk = dict(percent=0, used=0, total=0, error='')
909 _disk = dict(percent=0, used=0, total=0, error='')
910 _disk_inodes = dict(percent=0, free=0, used=0, total=0, error='')
907 _load = {'1_min': _NA, '5_min': _NA, '15_min': _NA}
911 _load = {'1_min': _NA, '5_min': _NA, '15_min': _NA}
908
912
909 model = VcsSettingsModel()
913 model = VcsSettingsModel()
910 storage_path = model.get_repos_location()
914 storage_path = model.get_repos_location()
911 gist_storage_path = os.path.join(storage_path, GIST_STORE_LOC)
915 gist_storage_path = os.path.join(storage_path, GIST_STORE_LOC)
912 archive_storage_path = rhodecode.CONFIG.get('archive_cache_dir', '')
916 archive_storage_path = rhodecode.CONFIG.get('archive_cache_dir', '')
913 search_index_storage_path = rhodecode.CONFIG.get('search.location', '')
917 search_index_storage_path = rhodecode.CONFIG.get('search.location', '')
914
918
915 if psutil:
919 if psutil:
916 # disk storage
920 # disk storage
917 try:
921 try:
918 _disk = dict(psutil.disk_usage(storage_path)._asdict())
922 _disk = dict(psutil.disk_usage(storage_path)._asdict())
919 except Exception as e:
923 except Exception as e:
920 log.exception('Failed to fetch disk info')
924 log.exception('Failed to fetch disk info')
921 _disk = {'percent': 0, 'used': 0, 'total': 0, 'error': str(e)}
925 _disk = {'percent': 0, 'used': 0, 'total': 0, 'error': str(e)}
922
926
927 # disk inodes usage
928 try:
929 i_stat = os.statvfs(storage_path)
930
931 _disk_inodes['used'] = i_stat.f_ffree
932 _disk_inodes['free'] = i_stat.f_favail
933 _disk_inodes['total'] = i_stat.f_files
934 _disk_inodes['percent'] = percentage(
935 _disk_inodes['used'], _disk_inodes['total'])
936 except Exception as e:
937 log.exception('Failed to fetch disk inodes info')
938 _disk_inodes['error'] = str(e)
939
923 # memory
940 # memory
924 _memory = dict(psutil.virtual_memory()._asdict())
941 _memory = dict(psutil.virtual_memory()._asdict())
925 _memory['percent2'] = psutil._common.usage_percent(
942 _memory['percent2'] = psutil._common.usage_percent(
926 (_memory['total'] - _memory['free']),
943 (_memory['total'] - _memory['free']),
927 _memory['total'], 1)
944 _memory['total'], 1)
928
945
929 # load averages
946 # load averages
930 if hasattr(psutil.os, 'getloadavg'):
947 if hasattr(psutil.os, 'getloadavg'):
931 _load = dict(zip(
948 _load = dict(zip(
932 ['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
949 ['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
933 _uptime = time.time() - psutil.boot_time()
950 _uptime = time.time() - psutil.boot_time()
934 _boot_time = psutil.boot_time()
951 _boot_time = psutil.boot_time()
935 _cpu = psutil.cpu_percent(0.5)
952 _cpu = psutil.cpu_percent(0.5)
936
953
937 mods = dict([(p.project_name, p.version)
954 mods = dict([(p.project_name, p.version)
938 for p in pkg_resources.working_set])
955 for p in pkg_resources.working_set])
939
956
940 def get_storage_size(storage_path):
957 def get_storage_size(storage_path):
941 sizes = []
958 sizes = []
942 for file_ in os.listdir(storage_path):
959 for file_ in os.listdir(storage_path):
943 storage_file = os.path.join(storage_path, file_)
960 storage_file = os.path.join(storage_path, file_)
944 if os.path.isfile(storage_file):
961 if os.path.isfile(storage_file):
945 try:
962 try:
946 sizes.append(os.path.getsize(storage_file))
963 sizes.append(os.path.getsize(storage_file))
947 except OSError:
964 except OSError:
948 log.exception('Failed to get size of storage file %s',
965 log.exception('Failed to get size of storage file %s',
949 storage_file)
966 storage_file)
950 pass
967 pass
951
968
952 return sum(sizes)
969 return sum(sizes)
953
970
954 # archive cache storage
971 # archive cache storage
955 _disk_archive = {'percent': 0, 'used': 0, 'total': 0}
972 _disk_archive = {'percent': 0, 'used': 0, 'total': 0}
956 try:
973 try:
957 archive_storage_path_exists = os.path.isdir(
974 archive_storage_path_exists = os.path.isdir(
958 archive_storage_path)
975 archive_storage_path)
959 if archive_storage_path and archive_storage_path_exists:
976 if archive_storage_path and archive_storage_path_exists:
960 used = get_storage_size(archive_storage_path)
977 used = get_storage_size(archive_storage_path)
961 _disk_archive.update({
978 _disk_archive.update({
962 'used': used,
979 'used': used,
963 'total': used,
980 'total': used,
964 })
981 })
965 except Exception as e:
982 except Exception as e:
966 log.exception('failed to fetch archive cache storage')
983 log.exception('failed to fetch archive cache storage')
967 _disk_archive['error'] = str(e)
984 _disk_archive['error'] = str(e)
968
985
969 # search index storage
986 # search index storage
970 _disk_index = {'percent': 0, 'used': 0, 'total': 0}
987 _disk_index = {'percent': 0, 'used': 0, 'total': 0}
971 try:
988 try:
972 search_index_storage_path_exists = os.path.isdir(
989 search_index_storage_path_exists = os.path.isdir(
973 search_index_storage_path)
990 search_index_storage_path)
974 if search_index_storage_path_exists:
991 if search_index_storage_path_exists:
975 used = get_storage_size(search_index_storage_path)
992 used = get_storage_size(search_index_storage_path)
976 _disk_index.update({
993 _disk_index.update({
977 'percent': 100,
994 'percent': 100,
978 'used': used,
995 'used': used,
979 'total': used,
996 'total': used,
980 })
997 })
981 except Exception as e:
998 except Exception as e:
982 log.exception('failed to fetch search index storage')
999 log.exception('failed to fetch search index storage')
983 _disk_index['error'] = str(e)
1000 _disk_index['error'] = str(e)
984
1001
985 # gist storage
1002 # gist storage
986 _disk_gist = {'percent': 0, 'used': 0, 'total': 0, 'items': 0}
1003 _disk_gist = {'percent': 0, 'used': 0, 'total': 0, 'items': 0}
987 try:
1004 try:
988 items_count = 0
1005 items_count = 0
989 used = 0
1006 used = 0
990 for root, dirs, files in os.walk(safe_str(gist_storage_path)):
1007 for root, dirs, files in os.walk(safe_str(gist_storage_path)):
991 if root == gist_storage_path:
1008 if root == gist_storage_path:
992 items_count = len(dirs)
1009 items_count = len(dirs)
993
1010
994 for f in files:
1011 for f in files:
995 try:
1012 try:
996 used += os.path.getsize(os.path.join(root, f))
1013 used += os.path.getsize(os.path.join(root, f))
997 except OSError:
1014 except OSError:
998 pass
1015 pass
999 _disk_gist.update({
1016 _disk_gist.update({
1000 'percent': 100,
1017 'percent': 100,
1001 'used': used,
1018 'used': used,
1002 'total': used,
1019 'total': used,
1003 'items': items_count
1020 'items': items_count
1004 })
1021 })
1005 except Exception as e:
1022 except Exception as e:
1006 log.exception('failed to fetch gist storage items')
1023 log.exception('failed to fetch gist storage items')
1007 _disk_gist['error'] = str(e)
1024 _disk_gist['error'] = str(e)
1008
1025
1009 # GIT info
1026 # GIT info
1010 git_ver = discover_git_version()
1027 git_ver = discover_git_version()
1011
1028
1012 # SVN info
1029 # SVN info
1013 # TODO: johbo: Add discover_svn_version to replace this code.
1030 # TODO: johbo: Add discover_svn_version to replace this code.
1014 try:
1031 try:
1015 import svn.core
1032 import svn.core
1016 svn_ver = svn.core.SVN_VERSION
1033 svn_ver = svn.core.SVN_VERSION
1017 except ImportError:
1034 except ImportError:
1018 svn_ver = None
1035 svn_ver = None
1019
1036
1020 # DB stuff
1037 # DB stuff
1021 db_info = url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
1038 db_info = url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
1022 db_type = db_info.__to_string__()
1039 db_type = db_info.__to_string__()
1023 try:
1040 try:
1024 engine = sql_base.metadata.bind
1041 engine = sql_base.metadata.bind
1025 db_server_info = engine.dialect._get_server_version_info(
1042 db_server_info = engine.dialect._get_server_version_info(
1026 Session.connection(bind=engine))
1043 Session.connection(bind=engine))
1027 db_version = '%s %s' % (db_info.drivername,
1044 db_version = '%s %s' % (db_info.drivername,
1028 '.'.join(map(str, db_server_info)))
1045 '.'.join(map(str, db_server_info)))
1029 except Exception:
1046 except Exception:
1030 log.exception('failed to fetch db version')
1047 log.exception('failed to fetch db version')
1031 db_version = '%s %s' % (db_info.drivername, '?')
1048 db_version = '%s %s' % (db_info.drivername, '?')
1032
1049
1033 db_migrate = DbMigrateVersion.query().filter(
1050 db_migrate = DbMigrateVersion.query().filter(
1034 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
1051 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
1035 db_migrate_version = db_migrate.version
1052 db_migrate_version = db_migrate.version
1036
1053
1037 info = {
1054 info = {
1038 'py_version': ' '.join(platform._sys_version()),
1055 'py_version': ' '.join(platform._sys_version()),
1039 'py_path': sys.executable,
1056 'py_path': sys.executable,
1040 'py_modules': sorted(mods.items(), key=lambda k: k[0].lower()),
1057 'py_modules': sorted(mods.items(), key=lambda k: k[0].lower()),
1041
1058
1042 'platform': safe_unicode(platform.platform()),
1059 'platform': safe_unicode(platform.platform()),
1043 'storage': storage_path,
1060 'storage': storage_path,
1044 'archive_storage': archive_storage_path,
1061 'archive_storage': archive_storage_path,
1045 'index_storage': search_index_storage_path,
1062 'index_storage': search_index_storage_path,
1046 'gist_storage': gist_storage_path,
1063 'gist_storage': gist_storage_path,
1047
1064
1048
1065
1049 'db_type': db_type,
1066 'db_type': db_type,
1050 'db_version': db_version,
1067 'db_version': db_version,
1051 'db_migrate_version': db_migrate_version,
1068 'db_migrate_version': db_migrate_version,
1052
1069
1053 'rhodecode_version': rhodecode.__version__,
1070 'rhodecode_version': rhodecode.__version__,
1054 'rhodecode_config_ini': rhodecode.CONFIG.get('__file__'),
1071 'rhodecode_config_ini': rhodecode.CONFIG.get('__file__'),
1055 'server_ip': '%s:%s' % (
1072 'server_ip': '%s:%s' % (
1056 get_server_ip_addr(environ, log_errors=False),
1073 get_server_ip_addr(environ, log_errors=False),
1057 get_server_port(environ)
1074 get_server_port(environ)
1058 ),
1075 ),
1059 'server_id': rhodecode.CONFIG.get('instance_id'),
1076 'server_id': rhodecode.CONFIG.get('instance_id'),
1060
1077
1061 'git_version': safe_unicode(git_ver),
1078 'git_version': safe_unicode(git_ver),
1062 'hg_version': mods.get('mercurial'),
1079 'hg_version': mods.get('mercurial'),
1063 'svn_version': svn_ver,
1080 'svn_version': svn_ver,
1064
1081
1065 'uptime': _uptime,
1082 'uptime': _uptime,
1066 'boot_time': _boot_time,
1083 'boot_time': _boot_time,
1067 'load': _load,
1084 'load': _load,
1068 'cpu': _cpu,
1085 'cpu': _cpu,
1069 'memory': _memory,
1086 'memory': _memory,
1070 'disk': _disk,
1087 'disk': _disk,
1088 'disk_inodes': _disk_inodes,
1071 'disk_archive': _disk_archive,
1089 'disk_archive': _disk_archive,
1072 'disk_gist': _disk_gist,
1090 'disk_gist': _disk_gist,
1073 'disk_index': _disk_index,
1091 'disk_index': _disk_index,
1074 }
1092 }
1075 return info
1093 return info
1076
1094
1077
1095
1078 def _check_rhodecode_hook(hook_path):
1096 def _check_rhodecode_hook(hook_path):
1079 """
1097 """
1080 Check if the hook was created by RhodeCode
1098 Check if the hook was created by RhodeCode
1081 """
1099 """
1082 if not os.path.exists(hook_path):
1100 if not os.path.exists(hook_path):
1083 return True
1101 return True
1084
1102
1085 log.debug('hook exists, checking if it is from rhodecode')
1103 log.debug('hook exists, checking if it is from rhodecode')
1086 hook_content = _read_hook(hook_path)
1104 hook_content = _read_hook(hook_path)
1087 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
1105 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
1088 if matches:
1106 if matches:
1089 try:
1107 try:
1090 version = matches.groups()[0]
1108 version = matches.groups()[0]
1091 log.debug('got %s, it is rhodecode', version)
1109 log.debug('got %s, it is rhodecode', version)
1092 return True
1110 return True
1093 except Exception:
1111 except Exception:
1094 log.exception("Exception while reading the hook version.")
1112 log.exception("Exception while reading the hook version.")
1095
1113
1096 return False
1114 return False
1097
1115
1098
1116
1099 def _read_hook(hook_path):
1117 def _read_hook(hook_path):
1100 with open(hook_path, 'rb') as f:
1118 with open(hook_path, 'rb') as f:
1101 content = f.read()
1119 content = f.read()
1102 return content
1120 return content
@@ -1,89 +1,90 b''
1 <%
1 <%
2 elems = [
2 elems = [
3 ## general
3 ## general
4 (_('RhodeCode Enterprise version'), h.literal('%s <div class="link" id="check_for_update" >%s</div>' % (c.rhodecode_version, _('check for updates'))), ''),
4 (_('RhodeCode Enterprise version'), h.literal('%s <div class="link" id="check_for_update" >%s</div>' % (c.rhodecode_version, _('check for updates'))), ''),
5 (_('Upgrade info endpoint'), h.literal('%s <br/><span >%s.</span>' % (c.rhodecode_update_url, _('Note: please make sure this server can access this url'))), ''),
5 (_('Upgrade info endpoint'), h.literal('%s <br/><span >%s.</span>' % (c.rhodecode_update_url, _('Note: please make sure this server can access this url'))), ''),
6 (_('Configuration INI file'), c.rhodecode_config_ini, ''),
6 (_('Configuration INI file'), c.rhodecode_config_ini, ''),
7 ## systems stats
7 ## systems stats
8 (_('RhodeCode Enterprise Server IP'), c.server_ip, ''),
8 (_('RhodeCode Enterprise Server IP'), c.server_ip, ''),
9 (_('RhodeCode Enterprise Server ID'), c.server_id, ''),
9 (_('RhodeCode Enterprise Server ID'), c.server_id, ''),
10 (_('Platform'), c.platform, ''),
10 (_('Platform'), c.platform, ''),
11 (_('Uptime'), c.uptime_age, ''),
11 (_('Uptime'), c.uptime_age, ''),
12 (_('Storage location'), c.storage, ''),
12 (_('Storage location'), c.storage, ''),
13 (_('Storage disk space'), "%s/%s, %s%% used%s" % (h.format_byte_size_binary(c.disk['used']), h.format_byte_size_binary(c.disk['total']),(c.disk['percent']), ' %s' % c.disk['error'] if 'error' in c.disk else ''), ''),
13 (_('Storage disk space'), "%s/%s, %s%% used%s" % (h.format_byte_size_binary(c.disk['used']), h.format_byte_size_binary(c.disk['total']),(c.disk['percent']), ' %s' % c.disk['error'] if 'error' in c.disk else ''), ''),
14 (_('Storage file limit (inodes)'), "%s/%s, %.1f%% used%s" % (c.disk_inodes['used'], c.disk_inodes['total'],(c.disk_inodes['percent']), ' %s' % c.disk_inodes['error'] if 'error' in c.disk_inodes else ''), ''),
14
15
15 (_('Search index storage'), c.index_storage, ''),
16 (_('Search index storage'), c.index_storage, ''),
16 (_('Search index size'), "%s %s" % (h.format_byte_size_binary(c.disk_index['used']), ' %s' % c.disk_index['error'] if 'error' in c.disk_index else ''), ''),
17 (_('Search index size'), "%s %s" % (h.format_byte_size_binary(c.disk_index['used']), ' %s' % c.disk_index['error'] if 'error' in c.disk_index else ''), ''),
17
18
18 (_('Gist storage'), c.gist_storage, ''),
19 (_('Gist storage'), c.gist_storage, ''),
19 (_('Gist storage size'), "%s (%s items)%s" % (h.format_byte_size_binary(c.disk_gist['used']),c.disk_gist['items'], ' %s' % c.disk_gist['error'] if 'error' in c.disk_gist else ''), ''),
20 (_('Gist storage size'), "%s (%s items)%s" % (h.format_byte_size_binary(c.disk_gist['used']),c.disk_gist['items'], ' %s' % c.disk_gist['error'] if 'error' in c.disk_gist else ''), ''),
20
21
21 (_('Archive cache'), h.literal('%s <br/><span >%s.</span>' % (c.archive_storage, _('Enable this by setting archive_cache_dir=/path/to/cache option in the .ini file'))), ''),
22 (_('Archive cache'), h.literal('%s <br/><span >%s.</span>' % (c.archive_storage, _('Enable this by setting archive_cache_dir=/path/to/cache option in the .ini file'))), ''),
22 (_('Archive cache size'), "%s%s" % (h.format_byte_size_binary(c.disk_archive['used']), ' %s' % c.disk_archive['error'] if 'error' in c.disk_archive else ''), ''),
23 (_('Archive cache size'), "%s%s" % (h.format_byte_size_binary(c.disk_archive['used']), ' %s' % c.disk_archive['error'] if 'error' in c.disk_archive else ''), ''),
23
24
24 (_('System memory'), c.system_memory, ''),
25 (_('System memory'), c.system_memory, ''),
25 (_('CPU'), '%s %%' %(c.cpu), ''),
26 (_('CPU'), '%s %%' %(c.cpu), ''),
26 (_('Load'), '1min: %s, 5min: %s, 15min: %s' %(c.load['1_min'],c.load['5_min'],c.load['15_min']), ''),
27 (_('Load'), '1min: %s, 5min: %s, 15min: %s' %(c.load['1_min'],c.load['5_min'],c.load['15_min']), ''),
27
28
28 ## rhodecode stuff
29 ## rhodecode stuff
29 (_('Python version'), c.py_version, ''),
30 (_('Python version'), c.py_version, ''),
30 (_('Python path'), c.py_path, ''),
31 (_('Python path'), c.py_path, ''),
31 (_('GIT version'), c.git_version, ''),
32 (_('GIT version'), c.git_version, ''),
32 (_('HG version'), c.hg_version, ''),
33 (_('HG version'), c.hg_version, ''),
33 (_('SVN version'), c.svn_version, ''),
34 (_('SVN version'), c.svn_version, ''),
34 (_('Database'), "%s @ version: %s" % (c.db_type, c.db_migrate_version), ''),
35 (_('Database'), "%s @ version: %s" % (c.db_type, c.db_migrate_version), ''),
35 (_('Database version'), c.db_version, ''),
36 (_('Database version'), c.db_version, ''),
36
37
37 ]
38 ]
38 %>
39 %>
39
40
40 <div id="update_notice" style="display: none; margin: -40px 0px 20px 0px">
41 <div id="update_notice" style="display: none; margin: -40px 0px 20px 0px">
41 <div>${_('Checking for updates...')}</div>
42 <div>${_('Checking for updates...')}</div>
42 </div>
43 </div>
43
44
44
45
45 <div class="panel panel-default">
46 <div class="panel panel-default">
46 <div class="panel-heading">
47 <div class="panel-heading">
47 <h3 class="panel-title">${_('System Info')}</h3>
48 <h3 class="panel-title">${_('System Info')}</h3>
48 % if c.allowed_to_snapshot:
49 % if c.allowed_to_snapshot:
49 <a href="${url('admin_settings_system', snapshot=1)}" class="panel-edit">${_('create snapshot')}</a>
50 <a href="${url('admin_settings_system', snapshot=1)}" class="panel-edit">${_('create snapshot')}</a>
50 % endif
51 % endif
51 </div>
52 </div>
52 <div class="panel-body">
53 <div class="panel-body">
53 <dl class="dl-horizontal settings">
54 <dl class="dl-horizontal settings">
54 %for dt, dd, tt in elems:
55 %for dt, dd, tt in elems:
55 <dt>${dt}:</dt>
56 <dt>${dt}:</dt>
56 <dd title="${tt}">${dd}</dd>
57 <dd title="${tt}">${dd}</dd>
57 %endfor
58 %endfor
58 </dl>
59 </dl>
59 </div>
60 </div>
60 </div>
61 </div>
61
62
62 <div class="panel panel-default">
63 <div class="panel panel-default">
63 <div class="panel-heading">
64 <div class="panel-heading">
64 <h3 class="panel-title">${_('Python Packages')}</h3>
65 <h3 class="panel-title">${_('Python Packages')}</h3>
65 </div>
66 </div>
66 <div class="panel-body">
67 <div class="panel-body">
67 <table class="table">
68 <table class="table">
68 <colgroup>
69 <colgroup>
69 <col class='label'>
70 <col class='label'>
70 <col class='content'>
71 <col class='content'>
71 </colgroup>
72 </colgroup>
72 <tbody>
73 <tbody>
73 %for key, value in c.py_modules:
74 %for key, value in c.py_modules:
74 <tr>
75 <tr>
75 <td>${key}</td>
76 <td>${key}</td>
76 <td>${value}</td>
77 <td>${value}</td>
77 </tr>
78 </tr>
78 %endfor
79 %endfor
79 </tbody>
80 </tbody>
80 </table>
81 </table>
81 </div>
82 </div>
82 </div>
83 </div>
83
84
84 <script>
85 <script>
85 $('#check_for_update').click(function(e){
86 $('#check_for_update').click(function(e){
86 $('#update_notice').show();
87 $('#update_notice').show();
87 $('#update_notice').load("${h.url('admin_settings_system_update',version=c.rhodecode_version, platform=c.platform)}");
88 $('#update_notice').load("${h.url('admin_settings_system_update',version=c.rhodecode_version, platform=c.platform)}");
88 })
89 })
89 </script>
90 </script>
General Comments 0
You need to be logged in to leave comments. Login now