##// END OF EJS Templates
bugfix: sanitize wouldn't allow files starting with . - fixes #3936
dan -
r91:88f65698 default
parent child Browse files
Show More
@@ -1,1105 +1,1105 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import re
26 import re
27 import sys
27 import sys
28 import time
28 import time
29 import traceback
29 import traceback
30 import logging
30 import logging
31 import cStringIO
31 import cStringIO
32 import pkg_resources
32 import pkg_resources
33
33
34 import pylons
34 import pylons
35 from pylons.i18n.translation import _
35 from pylons.i18n.translation import _
36 from sqlalchemy import func
36 from sqlalchemy import func
37 from zope.cachedescriptors.property import Lazy as LazyProperty
37 from zope.cachedescriptors.property import Lazy as LazyProperty
38
38
39 import rhodecode
39 import rhodecode
40 from rhodecode.lib.vcs import get_backend
40 from rhodecode.lib.vcs import get_backend
41 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
41 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
42 from rhodecode.lib.vcs.nodes import FileNode
42 from rhodecode.lib.vcs.nodes import FileNode
43 from rhodecode.lib.vcs.backends.base import EmptyCommit
43 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 from rhodecode.lib import helpers as h
44 from rhodecode.lib import helpers as h
45
45
46 from rhodecode.lib.auth import (
46 from rhodecode.lib.auth import (
47 HasRepoPermissionAny, HasRepoGroupPermissionAny,
47 HasRepoPermissionAny, HasRepoGroupPermissionAny,
48 HasUserGroupPermissionAny)
48 HasUserGroupPermissionAny)
49 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
49 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
50 from rhodecode.lib import hooks_utils, caches
50 from rhodecode.lib import hooks_utils, caches
51 from rhodecode.lib.utils import (
51 from rhodecode.lib.utils import (
52 get_filesystem_repos, action_logger, make_db_config)
52 get_filesystem_repos, action_logger, make_db_config)
53 from rhodecode.lib.utils2 import (
53 from rhodecode.lib.utils2 import (
54 safe_str, safe_unicode, get_server_url, md5)
54 safe_str, safe_unicode, get_server_url, md5)
55 from rhodecode.model import BaseModel
55 from rhodecode.model import BaseModel
56 from rhodecode.model.db import (
56 from rhodecode.model.db import (
57 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
57 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
58 PullRequest, DbMigrateVersion)
58 PullRequest, DbMigrateVersion)
59 from rhodecode.model.settings import VcsSettingsModel
59 from rhodecode.model.settings import VcsSettingsModel
60
60
61 log = logging.getLogger(__name__)
61 log = logging.getLogger(__name__)
62
62
63
63
class UserTemp(object):
    """Lightweight stand-in for a User row; carries only the id for logging."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
70
70
71
71
class RepoTemp(object):
    """Lightweight stand-in for a Repository row; carries only the id for logging."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
78
78
79
79
class SimpleCachedRepoList(object):
    """
    Lighter-weight iteration over db repositories: no scm backend
    initialisation, cache-friendly, and filtered by repository read
    permissions. Yields one small dict per accessible repository.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        checker = HasRepoPermissionAny(*self.perm_set)
        for dbr in self.db_repo_list:
            # permission is evaluated lazily, per yielded repository
            if not checker(dbr.repo_name, 'SimpleCachedRepoList check'):
                continue

            yield {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {},
            }
115
115
116
116
117 class _PermCheckIterator(object):
117 class _PermCheckIterator(object):
118
118
119 def __init__(
119 def __init__(
120 self, obj_list, obj_attr, perm_set, perm_checker,
120 self, obj_list, obj_attr, perm_set, perm_checker,
121 extra_kwargs=None):
121 extra_kwargs=None):
122 """
122 """
123 Creates iterator from given list of objects, additionally
123 Creates iterator from given list of objects, additionally
124 checking permission for them from perm_set var
124 checking permission for them from perm_set var
125
125
126 :param obj_list: list of db objects
126 :param obj_list: list of db objects
127 :param obj_attr: attribute of object to pass into perm_checker
127 :param obj_attr: attribute of object to pass into perm_checker
128 :param perm_set: list of permissions to check
128 :param perm_set: list of permissions to check
129 :param perm_checker: callable to check permissions against
129 :param perm_checker: callable to check permissions against
130 """
130 """
131 self.obj_list = obj_list
131 self.obj_list = obj_list
132 self.obj_attr = obj_attr
132 self.obj_attr = obj_attr
133 self.perm_set = perm_set
133 self.perm_set = perm_set
134 self.perm_checker = perm_checker
134 self.perm_checker = perm_checker
135 self.extra_kwargs = extra_kwargs or {}
135 self.extra_kwargs = extra_kwargs or {}
136
136
137 def __len__(self):
137 def __len__(self):
138 return len(self.obj_list)
138 return len(self.obj_list)
139
139
140 def __repr__(self):
140 def __repr__(self):
141 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
141 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
142
142
143 def __iter__(self):
143 def __iter__(self):
144 checker = self.perm_checker(*self.perm_set)
144 checker = self.perm_checker(*self.perm_set)
145 for db_obj in self.obj_list:
145 for db_obj in self.obj_list:
146 # check permission at this level
146 # check permission at this level
147 name = getattr(db_obj, self.obj_attr, None)
147 name = getattr(db_obj, self.obj_attr, None)
148 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
148 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
149 continue
149 continue
150
150
151 yield db_obj
151 yield db_obj
152
152
153
153
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over Repository objects."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']
        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name',
            perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
166
166
167
167
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over RepoGroup objects."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']
        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name',
            perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
179
179
180
180
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over UserGroup objects."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']
        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name',
            perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
192
192
193
193
194 class ScmModel(BaseModel):
194 class ScmModel(BaseModel):
195 """
195 """
196 Generic Scm Model
196 Generic Scm Model
197 """
197 """
198
198
199 @LazyProperty
199 @LazyProperty
200 def repos_path(self):
200 def repos_path(self):
201 """
201 """
202 Gets the repositories root path from database
202 Gets the repositories root path from database
203 """
203 """
204
204
205 settings_model = VcsSettingsModel(sa=self.sa)
205 settings_model = VcsSettingsModel(sa=self.sa)
206 return settings_model.get_repos_location()
206 return settings_model.get_repos_location()
207
207
208 def repo_scan(self, repos_path=None):
208 def repo_scan(self, repos_path=None):
209 """
209 """
210 Listing of repositories in given path. This path should not be a
210 Listing of repositories in given path. This path should not be a
211 repository itself. Return a dictionary of repository objects
211 repository itself. Return a dictionary of repository objects
212
212
213 :param repos_path: path to directory containing repositories
213 :param repos_path: path to directory containing repositories
214 """
214 """
215
215
216 if repos_path is None:
216 if repos_path is None:
217 repos_path = self.repos_path
217 repos_path = self.repos_path
218
218
219 log.info('scanning for repositories in %s', repos_path)
219 log.info('scanning for repositories in %s', repos_path)
220
220
221 config = make_db_config()
221 config = make_db_config()
222 config.set('extensions', 'largefiles', '')
222 config.set('extensions', 'largefiles', '')
223 repos = {}
223 repos = {}
224
224
225 for name, path in get_filesystem_repos(repos_path, recursive=True):
225 for name, path in get_filesystem_repos(repos_path, recursive=True):
226 # name need to be decomposed and put back together using the /
226 # name need to be decomposed and put back together using the /
227 # since this is internal storage separator for rhodecode
227 # since this is internal storage separator for rhodecode
228 name = Repository.normalize_repo_name(name)
228 name = Repository.normalize_repo_name(name)
229
229
230 try:
230 try:
231 if name in repos:
231 if name in repos:
232 raise RepositoryError('Duplicate repository name %s '
232 raise RepositoryError('Duplicate repository name %s '
233 'found in %s' % (name, path))
233 'found in %s' % (name, path))
234 elif path[0] in rhodecode.BACKENDS:
234 elif path[0] in rhodecode.BACKENDS:
235 klass = get_backend(path[0])
235 klass = get_backend(path[0])
236 repos[name] = klass(path[1], config=config)
236 repos[name] = klass(path[1], config=config)
237 except OSError:
237 except OSError:
238 continue
238 continue
239 log.debug('found %s paths with repositories', len(repos))
239 log.debug('found %s paths with repositories', len(repos))
240 return repos
240 return repos
241
241
242 def get_repos(self, all_repos=None, sort_key=None):
242 def get_repos(self, all_repos=None, sort_key=None):
243 """
243 """
244 Get all repositories from db and for each repo create it's
244 Get all repositories from db and for each repo create it's
245 backend instance and fill that backed with information from database
245 backend instance and fill that backed with information from database
246
246
247 :param all_repos: list of repository names as strings
247 :param all_repos: list of repository names as strings
248 give specific repositories list, good for filtering
248 give specific repositories list, good for filtering
249
249
250 :param sort_key: initial sorting of repositories
250 :param sort_key: initial sorting of repositories
251 """
251 """
252 if all_repos is None:
252 if all_repos is None:
253 all_repos = self.sa.query(Repository)\
253 all_repos = self.sa.query(Repository)\
254 .filter(Repository.group_id == None)\
254 .filter(Repository.group_id == None)\
255 .order_by(func.lower(Repository.repo_name)).all()
255 .order_by(func.lower(Repository.repo_name)).all()
256 repo_iter = SimpleCachedRepoList(
256 repo_iter = SimpleCachedRepoList(
257 all_repos, repos_path=self.repos_path, order_by=sort_key)
257 all_repos, repos_path=self.repos_path, order_by=sort_key)
258 return repo_iter
258 return repo_iter
259
259
260 def get_repo_groups(self, all_groups=None):
260 def get_repo_groups(self, all_groups=None):
261 if all_groups is None:
261 if all_groups is None:
262 all_groups = RepoGroup.query()\
262 all_groups = RepoGroup.query()\
263 .filter(RepoGroup.group_parent_id == None).all()
263 .filter(RepoGroup.group_parent_id == None).all()
264 return [x for x in RepoGroupList(all_groups)]
264 return [x for x in RepoGroupList(all_groups)]
265
265
266 def mark_for_invalidation(self, repo_name, delete=False):
266 def mark_for_invalidation(self, repo_name, delete=False):
267 """
267 """
268 Mark caches of this repo invalid in the database. `delete` flag
268 Mark caches of this repo invalid in the database. `delete` flag
269 removes the cache entries
269 removes the cache entries
270
270
271 :param repo_name: the repo_name for which caches should be marked
271 :param repo_name: the repo_name for which caches should be marked
272 invalid, or deleted
272 invalid, or deleted
273 :param delete: delete the entry keys instead of setting bool
273 :param delete: delete the entry keys instead of setting bool
274 flag on them
274 flag on them
275 """
275 """
276 CacheKey.set_invalidate(repo_name, delete=delete)
276 CacheKey.set_invalidate(repo_name, delete=delete)
277 repo = Repository.get_by_repo_name(repo_name)
277 repo = Repository.get_by_repo_name(repo_name)
278
278
279 if repo:
279 if repo:
280 config = repo._config
280 config = repo._config
281 config.set('extensions', 'largefiles', '')
281 config.set('extensions', 'largefiles', '')
282 cs_cache = None
282 cs_cache = None
283 if delete:
283 if delete:
284 # if we do a hard clear, reset last-commit to Empty
284 # if we do a hard clear, reset last-commit to Empty
285 cs_cache = EmptyCommit()
285 cs_cache = EmptyCommit()
286 repo.update_commit_cache(config=config, cs_cache=cs_cache)
286 repo.update_commit_cache(config=config, cs_cache=cs_cache)
287 caches.clear_repo_caches(repo_name)
287 caches.clear_repo_caches(repo_name)
288
288
289 def toggle_following_repo(self, follow_repo_id, user_id):
289 def toggle_following_repo(self, follow_repo_id, user_id):
290
290
291 f = self.sa.query(UserFollowing)\
291 f = self.sa.query(UserFollowing)\
292 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
292 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
293 .filter(UserFollowing.user_id == user_id).scalar()
293 .filter(UserFollowing.user_id == user_id).scalar()
294
294
295 if f is not None:
295 if f is not None:
296 try:
296 try:
297 self.sa.delete(f)
297 self.sa.delete(f)
298 action_logger(UserTemp(user_id),
298 action_logger(UserTemp(user_id),
299 'stopped_following_repo',
299 'stopped_following_repo',
300 RepoTemp(follow_repo_id))
300 RepoTemp(follow_repo_id))
301 return
301 return
302 except Exception:
302 except Exception:
303 log.error(traceback.format_exc())
303 log.error(traceback.format_exc())
304 raise
304 raise
305
305
306 try:
306 try:
307 f = UserFollowing()
307 f = UserFollowing()
308 f.user_id = user_id
308 f.user_id = user_id
309 f.follows_repo_id = follow_repo_id
309 f.follows_repo_id = follow_repo_id
310 self.sa.add(f)
310 self.sa.add(f)
311
311
312 action_logger(UserTemp(user_id),
312 action_logger(UserTemp(user_id),
313 'started_following_repo',
313 'started_following_repo',
314 RepoTemp(follow_repo_id))
314 RepoTemp(follow_repo_id))
315 except Exception:
315 except Exception:
316 log.error(traceback.format_exc())
316 log.error(traceback.format_exc())
317 raise
317 raise
318
318
319 def toggle_following_user(self, follow_user_id, user_id):
319 def toggle_following_user(self, follow_user_id, user_id):
320 f = self.sa.query(UserFollowing)\
320 f = self.sa.query(UserFollowing)\
321 .filter(UserFollowing.follows_user_id == follow_user_id)\
321 .filter(UserFollowing.follows_user_id == follow_user_id)\
322 .filter(UserFollowing.user_id == user_id).scalar()
322 .filter(UserFollowing.user_id == user_id).scalar()
323
323
324 if f is not None:
324 if f is not None:
325 try:
325 try:
326 self.sa.delete(f)
326 self.sa.delete(f)
327 return
327 return
328 except Exception:
328 except Exception:
329 log.error(traceback.format_exc())
329 log.error(traceback.format_exc())
330 raise
330 raise
331
331
332 try:
332 try:
333 f = UserFollowing()
333 f = UserFollowing()
334 f.user_id = user_id
334 f.user_id = user_id
335 f.follows_user_id = follow_user_id
335 f.follows_user_id = follow_user_id
336 self.sa.add(f)
336 self.sa.add(f)
337 except Exception:
337 except Exception:
338 log.error(traceback.format_exc())
338 log.error(traceback.format_exc())
339 raise
339 raise
340
340
341 def is_following_repo(self, repo_name, user_id, cache=False):
341 def is_following_repo(self, repo_name, user_id, cache=False):
342 r = self.sa.query(Repository)\
342 r = self.sa.query(Repository)\
343 .filter(Repository.repo_name == repo_name).scalar()
343 .filter(Repository.repo_name == repo_name).scalar()
344
344
345 f = self.sa.query(UserFollowing)\
345 f = self.sa.query(UserFollowing)\
346 .filter(UserFollowing.follows_repository == r)\
346 .filter(UserFollowing.follows_repository == r)\
347 .filter(UserFollowing.user_id == user_id).scalar()
347 .filter(UserFollowing.user_id == user_id).scalar()
348
348
349 return f is not None
349 return f is not None
350
350
351 def is_following_user(self, username, user_id, cache=False):
351 def is_following_user(self, username, user_id, cache=False):
352 u = User.get_by_username(username)
352 u = User.get_by_username(username)
353
353
354 f = self.sa.query(UserFollowing)\
354 f = self.sa.query(UserFollowing)\
355 .filter(UserFollowing.follows_user == u)\
355 .filter(UserFollowing.follows_user == u)\
356 .filter(UserFollowing.user_id == user_id).scalar()
356 .filter(UserFollowing.user_id == user_id).scalar()
357
357
358 return f is not None
358 return f is not None
359
359
360 def get_followers(self, repo):
360 def get_followers(self, repo):
361 repo = self._get_repo(repo)
361 repo = self._get_repo(repo)
362
362
363 return self.sa.query(UserFollowing)\
363 return self.sa.query(UserFollowing)\
364 .filter(UserFollowing.follows_repository == repo).count()
364 .filter(UserFollowing.follows_repository == repo).count()
365
365
366 def get_forks(self, repo):
366 def get_forks(self, repo):
367 repo = self._get_repo(repo)
367 repo = self._get_repo(repo)
368 return self.sa.query(Repository)\
368 return self.sa.query(Repository)\
369 .filter(Repository.fork == repo).count()
369 .filter(Repository.fork == repo).count()
370
370
371 def get_pull_requests(self, repo):
371 def get_pull_requests(self, repo):
372 repo = self._get_repo(repo)
372 repo = self._get_repo(repo)
373 return self.sa.query(PullRequest)\
373 return self.sa.query(PullRequest)\
374 .filter(PullRequest.target_repo == repo)\
374 .filter(PullRequest.target_repo == repo)\
375 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
375 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
376
376
377 def mark_as_fork(self, repo, fork, user):
377 def mark_as_fork(self, repo, fork, user):
378 repo = self._get_repo(repo)
378 repo = self._get_repo(repo)
379 fork = self._get_repo(fork)
379 fork = self._get_repo(fork)
380 if fork and repo.repo_id == fork.repo_id:
380 if fork and repo.repo_id == fork.repo_id:
381 raise Exception("Cannot set repository as fork of itself")
381 raise Exception("Cannot set repository as fork of itself")
382
382
383 if fork and repo.repo_type != fork.repo_type:
383 if fork and repo.repo_type != fork.repo_type:
384 raise RepositoryError(
384 raise RepositoryError(
385 "Cannot set repository as fork of repository with other type")
385 "Cannot set repository as fork of repository with other type")
386
386
387 repo.fork = fork
387 repo.fork = fork
388 self.sa.add(repo)
388 self.sa.add(repo)
389 return repo
389 return repo
390
390
391 def pull_changes(self, repo, username):
391 def pull_changes(self, repo, username):
392 dbrepo = self._get_repo(repo)
392 dbrepo = self._get_repo(repo)
393 clone_uri = dbrepo.clone_uri
393 clone_uri = dbrepo.clone_uri
394 if not clone_uri:
394 if not clone_uri:
395 raise Exception("This repository doesn't have a clone uri")
395 raise Exception("This repository doesn't have a clone uri")
396
396
397 repo = dbrepo.scm_instance(cache=False)
397 repo = dbrepo.scm_instance(cache=False)
398 # TODO: marcink fix this an re-enable since we need common logic
398 # TODO: marcink fix this an re-enable since we need common logic
399 # for hg/git remove hooks so we don't trigger them on fetching
399 # for hg/git remove hooks so we don't trigger them on fetching
400 # commits from remote
400 # commits from remote
401 repo.config.clear_section('hooks')
401 repo.config.clear_section('hooks')
402
402
403 repo_name = dbrepo.repo_name
403 repo_name = dbrepo.repo_name
404 try:
404 try:
405 # TODO: we need to make sure those operations call proper hooks !
405 # TODO: we need to make sure those operations call proper hooks !
406 repo.pull(clone_uri)
406 repo.pull(clone_uri)
407
407
408 self.mark_for_invalidation(repo_name)
408 self.mark_for_invalidation(repo_name)
409 except Exception:
409 except Exception:
410 log.error(traceback.format_exc())
410 log.error(traceback.format_exc())
411 raise
411 raise
412
412
413 def commit_change(self, repo, repo_name, commit, user, author, message,
413 def commit_change(self, repo, repo_name, commit, user, author, message,
414 content, f_path):
414 content, f_path):
415 """
415 """
416 Commits changes
416 Commits changes
417
417
418 :param repo: SCM instance
418 :param repo: SCM instance
419
419
420 """
420 """
421 user = self._get_user(user)
421 user = self._get_user(user)
422
422
423 # decoding here will force that we have proper encoded values
423 # decoding here will force that we have proper encoded values
424 # in any other case this will throw exceptions and deny commit
424 # in any other case this will throw exceptions and deny commit
425 content = safe_str(content)
425 content = safe_str(content)
426 path = safe_str(f_path)
426 path = safe_str(f_path)
427 # message and author needs to be unicode
427 # message and author needs to be unicode
428 # proper backend should then translate that into required type
428 # proper backend should then translate that into required type
429 message = safe_unicode(message)
429 message = safe_unicode(message)
430 author = safe_unicode(author)
430 author = safe_unicode(author)
431 imc = repo.in_memory_commit
431 imc = repo.in_memory_commit
432 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
432 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
433 try:
433 try:
434 # TODO: handle pre-push action !
434 # TODO: handle pre-push action !
435 tip = imc.commit(
435 tip = imc.commit(
436 message=message, author=author, parents=[commit],
436 message=message, author=author, parents=[commit],
437 branch=commit.branch)
437 branch=commit.branch)
438 except Exception as e:
438 except Exception as e:
439 log.error(traceback.format_exc())
439 log.error(traceback.format_exc())
440 raise IMCCommitError(str(e))
440 raise IMCCommitError(str(e))
441 finally:
441 finally:
442 # always clear caches, if commit fails we want fresh object also
442 # always clear caches, if commit fails we want fresh object also
443 self.mark_for_invalidation(repo_name)
443 self.mark_for_invalidation(repo_name)
444
444
445 # We trigger the post-push action
445 # We trigger the post-push action
446 hooks_utils.trigger_post_push_hook(
446 hooks_utils.trigger_post_push_hook(
447 username=user.username, action='push_local', repo_name=repo_name,
447 username=user.username, action='push_local', repo_name=repo_name,
448 repo_alias=repo.alias, commit_ids=[tip.raw_id])
448 repo_alias=repo.alias, commit_ids=[tip.raw_id])
449 return tip
449 return tip
450
450
451 def _sanitize_path(self, f_path):
451 def _sanitize_path(self, f_path):
452 if f_path.startswith('/') or f_path.startswith('.') or '../' in f_path:
452 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
453 raise NonRelativePathError('%s is not an relative path' % f_path)
453 raise NonRelativePathError('%s is not an relative path' % f_path)
454 if f_path:
454 if f_path:
455 f_path = os.path.normpath(f_path)
455 f_path = os.path.normpath(f_path)
456 return f_path
456 return f_path
457
457
458 def get_dirnode_metadata(self, commit, dir_node):
458 def get_dirnode_metadata(self, commit, dir_node):
459 if not dir_node.is_dir():
459 if not dir_node.is_dir():
460 return []
460 return []
461
461
462 data = []
462 data = []
463 for node in dir_node:
463 for node in dir_node:
464 if not node.is_file():
464 if not node.is_file():
465 # we skip file-nodes
465 # we skip file-nodes
466 continue
466 continue
467
467
468 last_commit = node.last_commit
468 last_commit = node.last_commit
469 last_commit_date = last_commit.date
469 last_commit_date = last_commit.date
470 data.append({
470 data.append({
471 'name': node.name,
471 'name': node.name,
472 'size': h.format_byte_size_binary(node.size),
472 'size': h.format_byte_size_binary(node.size),
473 'modified_at': h.format_date(last_commit_date),
473 'modified_at': h.format_date(last_commit_date),
474 'modified_ts': last_commit_date.isoformat(),
474 'modified_ts': last_commit_date.isoformat(),
475 'revision': last_commit.revision,
475 'revision': last_commit.revision,
476 'short_id': last_commit.short_id,
476 'short_id': last_commit.short_id,
477 'message': h.escape(last_commit.message),
477 'message': h.escape(last_commit.message),
478 'author': h.escape(last_commit.author),
478 'author': h.escape(last_commit.author),
479 'user_profile': h.gravatar_with_user(last_commit.author),
479 'user_profile': h.gravatar_with_user(last_commit.author),
480 })
480 })
481
481
482 return data
482 return data
483
483
    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False):
        """
        Recursively walk `root_path` at the given commit and collect every
        node found, based on the repository walk function.

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: if True each node is a plain path string; if False each
            node is a dict describing the node
        :param extended_info: add md5/binary/size/extension/mimetype/lines
            per file (only honored when ``flat`` is False)
        :param content: include file content for non-binary files (only
            honored when ``flat`` is False)
        :returns: tuple ``(_dirs, _files)`` of collected nodes
        :raises RepositoryError: re-raised (after debug logging) when the
            repository walk fails
        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            # walk expects a path relative to the repository root
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):
                for f in files:
                    _content = None
                    _data = f.unicode_path
                    if not flat:
                        _data = {
                            "name": f.unicode_path,
                            "type": "file",
                        }
                        if extended_info:
                            # load content once; re-used below when the
                            # `content` flag is also set
                            _content = safe_str(f.content)
                            _data.update({
                                "md5": md5(_content),
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,

                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })
                        if content:
                            # binary files get content=None on purpose
                            full_content = None
                            if not f.is_binary:
                                # in case we loaded the _content already
                                # re-use it, or load from f[ile]
                                full_content = _content or safe_str(f.content)

                            _data.update({
                                "content": full_content
                            })
                    _files.append(_data)
                for d in dirs:
                    _data = d.unicode_path
                    if not flat:
                        _data = {
                            "name": d.unicode_path,
                            "type": "dir",
                        }
                        if extended_info:
                            # directories carry no file metadata; keep the
                            # keys for a uniform shape with file entries
                            _data.update({
                                "md5": None,
                                "binary": None,
                                "size": None,
                                "extension": None,
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.debug("Exception in get_nodes", exc_info=True)
            raise

        return _dirs, _files
557
557
558 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
558 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
559 author=None, trigger_push_hook=True):
559 author=None, trigger_push_hook=True):
560 """
560 """
561 Commits given multiple nodes into repo
561 Commits given multiple nodes into repo
562
562
563 :param user: RhodeCode User object or user_id, the commiter
563 :param user: RhodeCode User object or user_id, the commiter
564 :param repo: RhodeCode Repository object
564 :param repo: RhodeCode Repository object
565 :param message: commit message
565 :param message: commit message
566 :param nodes: mapping {filename:{'content':content},...}
566 :param nodes: mapping {filename:{'content':content},...}
567 :param parent_commit: parent commit, can be empty than it's
567 :param parent_commit: parent commit, can be empty than it's
568 initial commit
568 initial commit
569 :param author: author of commit, cna be different that commiter
569 :param author: author of commit, cna be different that commiter
570 only for git
570 only for git
571 :param trigger_push_hook: trigger push hooks
571 :param trigger_push_hook: trigger push hooks
572
572
573 :returns: new commited commit
573 :returns: new commited commit
574 """
574 """
575
575
576 user = self._get_user(user)
576 user = self._get_user(user)
577 scm_instance = repo.scm_instance(cache=False)
577 scm_instance = repo.scm_instance(cache=False)
578
578
579 processed_nodes = []
579 processed_nodes = []
580 for f_path in nodes:
580 for f_path in nodes:
581 f_path = self._sanitize_path(f_path)
581 f_path = self._sanitize_path(f_path)
582 content = nodes[f_path]['content']
582 content = nodes[f_path]['content']
583 f_path = safe_str(f_path)
583 f_path = safe_str(f_path)
584 # decoding here will force that we have proper encoded values
584 # decoding here will force that we have proper encoded values
585 # in any other case this will throw exceptions and deny commit
585 # in any other case this will throw exceptions and deny commit
586 if isinstance(content, (basestring,)):
586 if isinstance(content, (basestring,)):
587 content = safe_str(content)
587 content = safe_str(content)
588 elif isinstance(content, (file, cStringIO.OutputType,)):
588 elif isinstance(content, (file, cStringIO.OutputType,)):
589 content = content.read()
589 content = content.read()
590 else:
590 else:
591 raise Exception('Content is of unrecognized type %s' % (
591 raise Exception('Content is of unrecognized type %s' % (
592 type(content)
592 type(content)
593 ))
593 ))
594 processed_nodes.append((f_path, content))
594 processed_nodes.append((f_path, content))
595
595
596 message = safe_unicode(message)
596 message = safe_unicode(message)
597 commiter = user.full_contact
597 commiter = user.full_contact
598 author = safe_unicode(author) if author else commiter
598 author = safe_unicode(author) if author else commiter
599
599
600 imc = scm_instance.in_memory_commit
600 imc = scm_instance.in_memory_commit
601
601
602 if not parent_commit:
602 if not parent_commit:
603 parent_commit = EmptyCommit(alias=scm_instance.alias)
603 parent_commit = EmptyCommit(alias=scm_instance.alias)
604
604
605 if isinstance(parent_commit, EmptyCommit):
605 if isinstance(parent_commit, EmptyCommit):
606 # EmptyCommit means we we're editing empty repository
606 # EmptyCommit means we we're editing empty repository
607 parents = None
607 parents = None
608 else:
608 else:
609 parents = [parent_commit]
609 parents = [parent_commit]
610 # add multiple nodes
610 # add multiple nodes
611 for path, content in processed_nodes:
611 for path, content in processed_nodes:
612 imc.add(FileNode(path, content=content))
612 imc.add(FileNode(path, content=content))
613 # TODO: handle pre push scenario
613 # TODO: handle pre push scenario
614 tip = imc.commit(message=message,
614 tip = imc.commit(message=message,
615 author=author,
615 author=author,
616 parents=parents,
616 parents=parents,
617 branch=parent_commit.branch)
617 branch=parent_commit.branch)
618
618
619 self.mark_for_invalidation(repo.repo_name)
619 self.mark_for_invalidation(repo.repo_name)
620 if trigger_push_hook:
620 if trigger_push_hook:
621 hooks_utils.trigger_post_push_hook(
621 hooks_utils.trigger_post_push_hook(
622 username=user.username, action='push_local',
622 username=user.username, action='push_local',
623 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
623 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
624 commit_ids=[tip.raw_id])
624 commit_ids=[tip.raw_id])
625 return tip
625 return tip
626
626
627 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
627 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
628 author=None, trigger_push_hook=True):
628 author=None, trigger_push_hook=True):
629 user = self._get_user(user)
629 user = self._get_user(user)
630 scm_instance = repo.scm_instance(cache=False)
630 scm_instance = repo.scm_instance(cache=False)
631
631
632 message = safe_unicode(message)
632 message = safe_unicode(message)
633 commiter = user.full_contact
633 commiter = user.full_contact
634 author = safe_unicode(author) if author else commiter
634 author = safe_unicode(author) if author else commiter
635
635
636 imc = scm_instance.in_memory_commit
636 imc = scm_instance.in_memory_commit
637
637
638 if not parent_commit:
638 if not parent_commit:
639 parent_commit = EmptyCommit(alias=scm_instance.alias)
639 parent_commit = EmptyCommit(alias=scm_instance.alias)
640
640
641 if isinstance(parent_commit, EmptyCommit):
641 if isinstance(parent_commit, EmptyCommit):
642 # EmptyCommit means we we're editing empty repository
642 # EmptyCommit means we we're editing empty repository
643 parents = None
643 parents = None
644 else:
644 else:
645 parents = [parent_commit]
645 parents = [parent_commit]
646
646
647 # add multiple nodes
647 # add multiple nodes
648 for _filename, data in nodes.items():
648 for _filename, data in nodes.items():
649 # new filename, can be renamed from the old one, also sanitaze
649 # new filename, can be renamed from the old one, also sanitaze
650 # the path for any hack around relative paths like ../../ etc.
650 # the path for any hack around relative paths like ../../ etc.
651 filename = self._sanitize_path(data['filename'])
651 filename = self._sanitize_path(data['filename'])
652 old_filename = self._sanitize_path(_filename)
652 old_filename = self._sanitize_path(_filename)
653 content = data['content']
653 content = data['content']
654
654
655 filenode = FileNode(old_filename, content=content)
655 filenode = FileNode(old_filename, content=content)
656 op = data['op']
656 op = data['op']
657 if op == 'add':
657 if op == 'add':
658 imc.add(filenode)
658 imc.add(filenode)
659 elif op == 'del':
659 elif op == 'del':
660 imc.remove(filenode)
660 imc.remove(filenode)
661 elif op == 'mod':
661 elif op == 'mod':
662 if filename != old_filename:
662 if filename != old_filename:
663 # TODO: handle renames more efficient, needs vcs lib
663 # TODO: handle renames more efficient, needs vcs lib
664 # changes
664 # changes
665 imc.remove(filenode)
665 imc.remove(filenode)
666 imc.add(FileNode(filename, content=content))
666 imc.add(FileNode(filename, content=content))
667 else:
667 else:
668 imc.change(filenode)
668 imc.change(filenode)
669
669
670 try:
670 try:
671 # TODO: handle pre push scenario
671 # TODO: handle pre push scenario
672 # commit changes
672 # commit changes
673 tip = imc.commit(message=message,
673 tip = imc.commit(message=message,
674 author=author,
674 author=author,
675 parents=parents,
675 parents=parents,
676 branch=parent_commit.branch)
676 branch=parent_commit.branch)
677 except NodeNotChangedError:
677 except NodeNotChangedError:
678 raise
678 raise
679 except Exception as e:
679 except Exception as e:
680 log.exception("Unexpected exception during call to imc.commit")
680 log.exception("Unexpected exception during call to imc.commit")
681 raise IMCCommitError(str(e))
681 raise IMCCommitError(str(e))
682 finally:
682 finally:
683 # always clear caches, if commit fails we want fresh object also
683 # always clear caches, if commit fails we want fresh object also
684 self.mark_for_invalidation(repo.repo_name)
684 self.mark_for_invalidation(repo.repo_name)
685
685
686 if trigger_push_hook:
686 if trigger_push_hook:
687 hooks_utils.trigger_post_push_hook(
687 hooks_utils.trigger_post_push_hook(
688 username=user.username, action='push_local',
688 username=user.username, action='push_local',
689 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
689 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
690 commit_ids=[tip.raw_id])
690 commit_ids=[tip.raw_id])
691
691
692 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
692 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
693 author=None, trigger_push_hook=True):
693 author=None, trigger_push_hook=True):
694 """
694 """
695 Deletes given multiple nodes into `repo`
695 Deletes given multiple nodes into `repo`
696
696
697 :param user: RhodeCode User object or user_id, the committer
697 :param user: RhodeCode User object or user_id, the committer
698 :param repo: RhodeCode Repository object
698 :param repo: RhodeCode Repository object
699 :param message: commit message
699 :param message: commit message
700 :param nodes: mapping {filename:{'content':content},...}
700 :param nodes: mapping {filename:{'content':content},...}
701 :param parent_commit: parent commit, can be empty than it's initial
701 :param parent_commit: parent commit, can be empty than it's initial
702 commit
702 commit
703 :param author: author of commit, cna be different that commiter only
703 :param author: author of commit, cna be different that commiter only
704 for git
704 for git
705 :param trigger_push_hook: trigger push hooks
705 :param trigger_push_hook: trigger push hooks
706
706
707 :returns: new commit after deletion
707 :returns: new commit after deletion
708 """
708 """
709
709
710 user = self._get_user(user)
710 user = self._get_user(user)
711 scm_instance = repo.scm_instance(cache=False)
711 scm_instance = repo.scm_instance(cache=False)
712
712
713 processed_nodes = []
713 processed_nodes = []
714 for f_path in nodes:
714 for f_path in nodes:
715 f_path = self._sanitize_path(f_path)
715 f_path = self._sanitize_path(f_path)
716 # content can be empty but for compatabilty it allows same dicts
716 # content can be empty but for compatabilty it allows same dicts
717 # structure as add_nodes
717 # structure as add_nodes
718 content = nodes[f_path].get('content')
718 content = nodes[f_path].get('content')
719 processed_nodes.append((f_path, content))
719 processed_nodes.append((f_path, content))
720
720
721 message = safe_unicode(message)
721 message = safe_unicode(message)
722 commiter = user.full_contact
722 commiter = user.full_contact
723 author = safe_unicode(author) if author else commiter
723 author = safe_unicode(author) if author else commiter
724
724
725 imc = scm_instance.in_memory_commit
725 imc = scm_instance.in_memory_commit
726
726
727 if not parent_commit:
727 if not parent_commit:
728 parent_commit = EmptyCommit(alias=scm_instance.alias)
728 parent_commit = EmptyCommit(alias=scm_instance.alias)
729
729
730 if isinstance(parent_commit, EmptyCommit):
730 if isinstance(parent_commit, EmptyCommit):
731 # EmptyCommit means we we're editing empty repository
731 # EmptyCommit means we we're editing empty repository
732 parents = None
732 parents = None
733 else:
733 else:
734 parents = [parent_commit]
734 parents = [parent_commit]
735 # add multiple nodes
735 # add multiple nodes
736 for path, content in processed_nodes:
736 for path, content in processed_nodes:
737 imc.remove(FileNode(path, content=content))
737 imc.remove(FileNode(path, content=content))
738
738
739 # TODO: handle pre push scenario
739 # TODO: handle pre push scenario
740 tip = imc.commit(message=message,
740 tip = imc.commit(message=message,
741 author=author,
741 author=author,
742 parents=parents,
742 parents=parents,
743 branch=parent_commit.branch)
743 branch=parent_commit.branch)
744
744
745 self.mark_for_invalidation(repo.repo_name)
745 self.mark_for_invalidation(repo.repo_name)
746 if trigger_push_hook:
746 if trigger_push_hook:
747 hooks_utils.trigger_post_push_hook(
747 hooks_utils.trigger_post_push_hook(
748 username=user.username, action='push_local',
748 username=user.username, action='push_local',
749 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
749 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
750 commit_ids=[tip.raw_id])
750 commit_ids=[tip.raw_id])
751 return tip
751 return tip
752
752
753 def strip(self, repo, commit_id, branch):
753 def strip(self, repo, commit_id, branch):
754 scm_instance = repo.scm_instance(cache=False)
754 scm_instance = repo.scm_instance(cache=False)
755 scm_instance.config.clear_section('hooks')
755 scm_instance.config.clear_section('hooks')
756 scm_instance.strip(commit_id, branch)
756 scm_instance.strip(commit_id, branch)
757 self.mark_for_invalidation(repo.repo_name)
757 self.mark_for_invalidation(repo.repo_name)
758
758
759 def get_unread_journal(self):
759 def get_unread_journal(self):
760 return self.sa.query(UserLog).count()
760 return self.sa.query(UserLog).count()
761
761
762 def get_repo_landing_revs(self, repo=None):
762 def get_repo_landing_revs(self, repo=None):
763 """
763 """
764 Generates select option with tags branches and bookmarks (for hg only)
764 Generates select option with tags branches and bookmarks (for hg only)
765 grouped by type
765 grouped by type
766
766
767 :param repo:
767 :param repo:
768 """
768 """
769
769
770 hist_l = []
770 hist_l = []
771 choices = []
771 choices = []
772 repo = self._get_repo(repo)
772 repo = self._get_repo(repo)
773 hist_l.append(['rev:tip', _('latest tip')])
773 hist_l.append(['rev:tip', _('latest tip')])
774 choices.append('rev:tip')
774 choices.append('rev:tip')
775 if not repo:
775 if not repo:
776 return choices, hist_l
776 return choices, hist_l
777
777
778 repo = repo.scm_instance()
778 repo = repo.scm_instance()
779
779
780 branches_group = (
780 branches_group = (
781 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
781 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
782 for b in repo.branches],
782 for b in repo.branches],
783 _("Branches"))
783 _("Branches"))
784 hist_l.append(branches_group)
784 hist_l.append(branches_group)
785 choices.extend([x[0] for x in branches_group[0]])
785 choices.extend([x[0] for x in branches_group[0]])
786
786
787 if repo.alias == 'hg':
787 if repo.alias == 'hg':
788 bookmarks_group = (
788 bookmarks_group = (
789 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
789 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
790 for b in repo.bookmarks],
790 for b in repo.bookmarks],
791 _("Bookmarks"))
791 _("Bookmarks"))
792 hist_l.append(bookmarks_group)
792 hist_l.append(bookmarks_group)
793 choices.extend([x[0] for x in bookmarks_group[0]])
793 choices.extend([x[0] for x in bookmarks_group[0]])
794
794
795 tags_group = (
795 tags_group = (
796 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
796 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
797 for t in repo.tags],
797 for t in repo.tags],
798 _("Tags"))
798 _("Tags"))
799 hist_l.append(tags_group)
799 hist_l.append(tags_group)
800 choices.extend([x[0] for x in tags_group[0]])
800 choices.extend([x[0] for x in tags_group[0]])
801
801
802 return choices, hist_l
802 return choices, hist_l
803
803
804 def install_git_hook(self, repo, force_create=False):
804 def install_git_hook(self, repo, force_create=False):
805 """
805 """
806 Creates a rhodecode hook inside a git repository
806 Creates a rhodecode hook inside a git repository
807
807
808 :param repo: Instance of VCS repo
808 :param repo: Instance of VCS repo
809 :param force_create: Create even if same name hook exists
809 :param force_create: Create even if same name hook exists
810 """
810 """
811
811
812 loc = os.path.join(repo.path, 'hooks')
812 loc = os.path.join(repo.path, 'hooks')
813 if not repo.bare:
813 if not repo.bare:
814 loc = os.path.join(repo.path, '.git', 'hooks')
814 loc = os.path.join(repo.path, '.git', 'hooks')
815 if not os.path.isdir(loc):
815 if not os.path.isdir(loc):
816 os.makedirs(loc, mode=0777)
816 os.makedirs(loc, mode=0777)
817
817
818 tmpl_post = pkg_resources.resource_string(
818 tmpl_post = pkg_resources.resource_string(
819 'rhodecode', '/'.join(
819 'rhodecode', '/'.join(
820 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
820 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
821 tmpl_pre = pkg_resources.resource_string(
821 tmpl_pre = pkg_resources.resource_string(
822 'rhodecode', '/'.join(
822 'rhodecode', '/'.join(
823 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
823 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
824
824
825 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
825 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
826 _hook_file = os.path.join(loc, '%s-receive' % h_type)
826 _hook_file = os.path.join(loc, '%s-receive' % h_type)
827 log.debug('Installing git hook in repo %s', repo)
827 log.debug('Installing git hook in repo %s', repo)
828 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
828 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
829
829
830 if _rhodecode_hook or force_create:
830 if _rhodecode_hook or force_create:
831 log.debug('writing %s hook file !', h_type)
831 log.debug('writing %s hook file !', h_type)
832 try:
832 try:
833 with open(_hook_file, 'wb') as f:
833 with open(_hook_file, 'wb') as f:
834 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
834 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
835 tmpl = tmpl.replace('_ENV_', sys.executable)
835 tmpl = tmpl.replace('_ENV_', sys.executable)
836 f.write(tmpl)
836 f.write(tmpl)
837 os.chmod(_hook_file, 0755)
837 os.chmod(_hook_file, 0755)
838 except IOError:
838 except IOError:
839 log.exception('error writing hook file %s', _hook_file)
839 log.exception('error writing hook file %s', _hook_file)
840 else:
840 else:
841 log.debug('skipping writing hook file')
841 log.debug('skipping writing hook file')
842
842
843 def install_svn_hooks(self, repo, force_create=False):
843 def install_svn_hooks(self, repo, force_create=False):
844 """
844 """
845 Creates rhodecode hooks inside a svn repository
845 Creates rhodecode hooks inside a svn repository
846
846
847 :param repo: Instance of VCS repo
847 :param repo: Instance of VCS repo
848 :param force_create: Create even if same name hook exists
848 :param force_create: Create even if same name hook exists
849 """
849 """
850 hooks_path = os.path.join(repo.path, 'hooks')
850 hooks_path = os.path.join(repo.path, 'hooks')
851 if not os.path.isdir(hooks_path):
851 if not os.path.isdir(hooks_path):
852 os.makedirs(hooks_path)
852 os.makedirs(hooks_path)
853 post_commit_tmpl = pkg_resources.resource_string(
853 post_commit_tmpl = pkg_resources.resource_string(
854 'rhodecode', '/'.join(
854 'rhodecode', '/'.join(
855 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
855 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
856 pre_commit_template = pkg_resources.resource_string(
856 pre_commit_template = pkg_resources.resource_string(
857 'rhodecode', '/'.join(
857 'rhodecode', '/'.join(
858 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
858 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
859 templates = {
859 templates = {
860 'post-commit': post_commit_tmpl,
860 'post-commit': post_commit_tmpl,
861 'pre-commit': pre_commit_template
861 'pre-commit': pre_commit_template
862 }
862 }
863 for filename in templates:
863 for filename in templates:
864 _hook_file = os.path.join(hooks_path, filename)
864 _hook_file = os.path.join(hooks_path, filename)
865 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
865 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
866 if _rhodecode_hook or force_create:
866 if _rhodecode_hook or force_create:
867 log.debug('writing %s hook file !', filename)
867 log.debug('writing %s hook file !', filename)
868 template = templates[filename]
868 template = templates[filename]
869 try:
869 try:
870 with open(_hook_file, 'wb') as f:
870 with open(_hook_file, 'wb') as f:
871 template = template.replace(
871 template = template.replace(
872 '_TMPL_', rhodecode.__version__)
872 '_TMPL_', rhodecode.__version__)
873 template = template.replace('_ENV_', sys.executable)
873 template = template.replace('_ENV_', sys.executable)
874 f.write(template)
874 f.write(template)
875 os.chmod(_hook_file, 0755)
875 os.chmod(_hook_file, 0755)
876 except IOError:
876 except IOError:
877 log.exception('error writing hook file %s', filename)
877 log.exception('error writing hook file %s', filename)
878 else:
878 else:
879 log.debug('skipping writing hook file')
879 log.debug('skipping writing hook file')
880
880
881 def install_hooks(self, repo, repo_type):
881 def install_hooks(self, repo, repo_type):
882 if repo_type == 'git':
882 if repo_type == 'git':
883 self.install_git_hook(repo)
883 self.install_git_hook(repo)
884 elif repo_type == 'svn':
884 elif repo_type == 'svn':
885 self.install_svn_hooks(repo)
885 self.install_svn_hooks(repo)
886
886
887 def get_server_info(self, environ=None):
887 def get_server_info(self, environ=None):
888 import platform
888 import platform
889 import rhodecode
889 import rhodecode
890 import pkg_resources
890 import pkg_resources
891 from rhodecode.model.meta import Base as sql_base, Session
891 from rhodecode.model.meta import Base as sql_base, Session
892 from sqlalchemy.engine import url
892 from sqlalchemy.engine import url
893 from rhodecode.lib.base import get_server_ip_addr, get_server_port
893 from rhodecode.lib.base import get_server_ip_addr, get_server_port
894 from rhodecode.lib.vcs.backends.git import discover_git_version
894 from rhodecode.lib.vcs.backends.git import discover_git_version
895 from rhodecode.model.gist import GIST_STORE_LOC
895 from rhodecode.model.gist import GIST_STORE_LOC
896
896
897 try:
897 try:
898 # cygwin cannot have yet psutil support.
898 # cygwin cannot have yet psutil support.
899 import psutil
899 import psutil
900 except ImportError:
900 except ImportError:
901 psutil = None
901 psutil = None
902
902
903 environ = environ or {}
903 environ = environ or {}
904 _NA = 'NOT AVAILABLE'
904 _NA = 'NOT AVAILABLE'
905 _memory = _NA
905 _memory = _NA
906 _uptime = _NA
906 _uptime = _NA
907 _boot_time = _NA
907 _boot_time = _NA
908 _cpu = _NA
908 _cpu = _NA
909 _disk = dict(percent=0, used=0, total=0, error='')
909 _disk = dict(percent=0, used=0, total=0, error='')
910 _load = {'1_min': _NA, '5_min': _NA, '15_min': _NA}
910 _load = {'1_min': _NA, '5_min': _NA, '15_min': _NA}
911
911
912 model = VcsSettingsModel()
912 model = VcsSettingsModel()
913 storage_path = model.get_repos_location()
913 storage_path = model.get_repos_location()
914 gist_storage_path = os.path.join(storage_path, GIST_STORE_LOC)
914 gist_storage_path = os.path.join(storage_path, GIST_STORE_LOC)
915 archive_storage_path = rhodecode.CONFIG.get('archive_cache_dir', '')
915 archive_storage_path = rhodecode.CONFIG.get('archive_cache_dir', '')
916 search_index_storage_path = rhodecode.CONFIG.get('search.location', '')
916 search_index_storage_path = rhodecode.CONFIG.get('search.location', '')
917
917
918 if psutil:
918 if psutil:
919 # disk storage
919 # disk storage
920 try:
920 try:
921 _disk = dict(psutil.disk_usage(storage_path)._asdict())
921 _disk = dict(psutil.disk_usage(storage_path)._asdict())
922 except Exception as e:
922 except Exception as e:
923 log.exception('Failed to fetch disk info')
923 log.exception('Failed to fetch disk info')
924 _disk = {'percent': 0, 'used': 0, 'total': 0, 'error': str(e)}
924 _disk = {'percent': 0, 'used': 0, 'total': 0, 'error': str(e)}
925
925
926 # memory
926 # memory
927 _memory = dict(psutil.virtual_memory()._asdict())
927 _memory = dict(psutil.virtual_memory()._asdict())
928 _memory['percent2'] = psutil._common.usage_percent(
928 _memory['percent2'] = psutil._common.usage_percent(
929 (_memory['total'] - _memory['free']),
929 (_memory['total'] - _memory['free']),
930 _memory['total'], 1)
930 _memory['total'], 1)
931
931
932 # load averages
932 # load averages
933 if hasattr(psutil.os, 'getloadavg'):
933 if hasattr(psutil.os, 'getloadavg'):
934 _load = dict(zip(
934 _load = dict(zip(
935 ['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
935 ['1_min', '5_min', '15_min'], psutil.os.getloadavg()))
936 _uptime = time.time() - psutil.boot_time()
936 _uptime = time.time() - psutil.boot_time()
937 _boot_time = psutil.boot_time()
937 _boot_time = psutil.boot_time()
938 _cpu = psutil.cpu_percent(0.5)
938 _cpu = psutil.cpu_percent(0.5)
939
939
940 mods = dict([(p.project_name, p.version)
940 mods = dict([(p.project_name, p.version)
941 for p in pkg_resources.working_set])
941 for p in pkg_resources.working_set])
942
942
943 def get_storage_size(storage_path):
943 def get_storage_size(storage_path):
944 sizes = []
944 sizes = []
945 for file_ in os.listdir(storage_path):
945 for file_ in os.listdir(storage_path):
946 storage_file = os.path.join(storage_path, file_)
946 storage_file = os.path.join(storage_path, file_)
947 if os.path.isfile(storage_file):
947 if os.path.isfile(storage_file):
948 try:
948 try:
949 sizes.append(os.path.getsize(storage_file))
949 sizes.append(os.path.getsize(storage_file))
950 except OSError:
950 except OSError:
951 log.exception('Failed to get size of storage file %s',
951 log.exception('Failed to get size of storage file %s',
952 storage_file)
952 storage_file)
953 pass
953 pass
954
954
955 return sum(sizes)
955 return sum(sizes)
956
956
957 # archive cache storage
957 # archive cache storage
958 _disk_archive = {'percent': 0, 'used': 0, 'total': 0}
958 _disk_archive = {'percent': 0, 'used': 0, 'total': 0}
959 try:
959 try:
960 archive_storage_path_exists = os.path.isdir(
960 archive_storage_path_exists = os.path.isdir(
961 archive_storage_path)
961 archive_storage_path)
962 if archive_storage_path and archive_storage_path_exists:
962 if archive_storage_path and archive_storage_path_exists:
963 used = get_storage_size(archive_storage_path)
963 used = get_storage_size(archive_storage_path)
964 _disk_archive.update({
964 _disk_archive.update({
965 'used': used,
965 'used': used,
966 'total': used,
966 'total': used,
967 })
967 })
968 except Exception as e:
968 except Exception as e:
969 log.exception('failed to fetch archive cache storage')
969 log.exception('failed to fetch archive cache storage')
970 _disk_archive['error'] = str(e)
970 _disk_archive['error'] = str(e)
971
971
972 # search index storage
972 # search index storage
973 _disk_index = {'percent': 0, 'used': 0, 'total': 0}
973 _disk_index = {'percent': 0, 'used': 0, 'total': 0}
974 try:
974 try:
975 search_index_storage_path_exists = os.path.isdir(
975 search_index_storage_path_exists = os.path.isdir(
976 search_index_storage_path)
976 search_index_storage_path)
977 if search_index_storage_path_exists:
977 if search_index_storage_path_exists:
978 used = get_storage_size(search_index_storage_path)
978 used = get_storage_size(search_index_storage_path)
979 _disk_index.update({
979 _disk_index.update({
980 'percent': 100,
980 'percent': 100,
981 'used': used,
981 'used': used,
982 'total': used,
982 'total': used,
983 })
983 })
984 except Exception as e:
984 except Exception as e:
985 log.exception('failed to fetch search index storage')
985 log.exception('failed to fetch search index storage')
986 _disk_index['error'] = str(e)
986 _disk_index['error'] = str(e)
987
987
988 # gist storage
988 # gist storage
989 _disk_gist = {'percent': 0, 'used': 0, 'total': 0, 'items': 0}
989 _disk_gist = {'percent': 0, 'used': 0, 'total': 0, 'items': 0}
990 try:
990 try:
991 items_count = 0
991 items_count = 0
992 used = 0
992 used = 0
993 for root, dirs, files in os.walk(safe_str(gist_storage_path)):
993 for root, dirs, files in os.walk(safe_str(gist_storage_path)):
994 if root == gist_storage_path:
994 if root == gist_storage_path:
995 items_count = len(dirs)
995 items_count = len(dirs)
996
996
997 for f in files:
997 for f in files:
998 try:
998 try:
999 used += os.path.getsize(os.path.join(root, f))
999 used += os.path.getsize(os.path.join(root, f))
1000 except OSError:
1000 except OSError:
1001 pass
1001 pass
1002 _disk_gist.update({
1002 _disk_gist.update({
1003 'percent': 100,
1003 'percent': 100,
1004 'used': used,
1004 'used': used,
1005 'total': used,
1005 'total': used,
1006 'items': items_count
1006 'items': items_count
1007 })
1007 })
1008 except Exception as e:
1008 except Exception as e:
1009 log.exception('failed to fetch gist storage items')
1009 log.exception('failed to fetch gist storage items')
1010 _disk_gist['error'] = str(e)
1010 _disk_gist['error'] = str(e)
1011
1011
1012 # GIT info
1012 # GIT info
1013 git_ver = discover_git_version()
1013 git_ver = discover_git_version()
1014
1014
1015 # SVN info
1015 # SVN info
1016 # TODO: johbo: Add discover_svn_version to replace this code.
1016 # TODO: johbo: Add discover_svn_version to replace this code.
1017 try:
1017 try:
1018 import svn.core
1018 import svn.core
1019 svn_ver = svn.core.SVN_VERSION
1019 svn_ver = svn.core.SVN_VERSION
1020 except ImportError:
1020 except ImportError:
1021 svn_ver = None
1021 svn_ver = None
1022
1022
1023 # DB stuff
1023 # DB stuff
1024 db_info = url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
1024 db_info = url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])
1025 db_type = db_info.__to_string__()
1025 db_type = db_info.__to_string__()
1026 try:
1026 try:
1027 engine = sql_base.metadata.bind
1027 engine = sql_base.metadata.bind
1028 db_server_info = engine.dialect._get_server_version_info(
1028 db_server_info = engine.dialect._get_server_version_info(
1029 Session.connection(bind=engine))
1029 Session.connection(bind=engine))
1030 db_version = '%s %s' % (db_info.drivername,
1030 db_version = '%s %s' % (db_info.drivername,
1031 '.'.join(map(str, db_server_info)))
1031 '.'.join(map(str, db_server_info)))
1032 except Exception:
1032 except Exception:
1033 log.exception('failed to fetch db version')
1033 log.exception('failed to fetch db version')
1034 db_version = '%s %s' % (db_info.drivername, '?')
1034 db_version = '%s %s' % (db_info.drivername, '?')
1035
1035
1036 db_migrate = DbMigrateVersion.query().filter(
1036 db_migrate = DbMigrateVersion.query().filter(
1037 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
1037 DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()
1038 db_migrate_version = db_migrate.version
1038 db_migrate_version = db_migrate.version
1039
1039
1040 info = {
1040 info = {
1041 'py_version': ' '.join(platform._sys_version()),
1041 'py_version': ' '.join(platform._sys_version()),
1042 'py_path': sys.executable,
1042 'py_path': sys.executable,
1043 'py_modules': sorted(mods.items(), key=lambda k: k[0].lower()),
1043 'py_modules': sorted(mods.items(), key=lambda k: k[0].lower()),
1044
1044
1045 'platform': safe_unicode(platform.platform()),
1045 'platform': safe_unicode(platform.platform()),
1046 'storage': storage_path,
1046 'storage': storage_path,
1047 'archive_storage': archive_storage_path,
1047 'archive_storage': archive_storage_path,
1048 'index_storage': search_index_storage_path,
1048 'index_storage': search_index_storage_path,
1049 'gist_storage': gist_storage_path,
1049 'gist_storage': gist_storage_path,
1050
1050
1051
1051
1052 'db_type': db_type,
1052 'db_type': db_type,
1053 'db_version': db_version,
1053 'db_version': db_version,
1054 'db_migrate_version': db_migrate_version,
1054 'db_migrate_version': db_migrate_version,
1055
1055
1056 'rhodecode_version': rhodecode.__version__,
1056 'rhodecode_version': rhodecode.__version__,
1057 'rhodecode_config_ini': rhodecode.CONFIG.get('__file__'),
1057 'rhodecode_config_ini': rhodecode.CONFIG.get('__file__'),
1058 'server_ip': '%s:%s' % (
1058 'server_ip': '%s:%s' % (
1059 get_server_ip_addr(environ, log_errors=False),
1059 get_server_ip_addr(environ, log_errors=False),
1060 get_server_port(environ)
1060 get_server_port(environ)
1061 ),
1061 ),
1062 'server_id': rhodecode.CONFIG.get('instance_id'),
1062 'server_id': rhodecode.CONFIG.get('instance_id'),
1063
1063
1064 'git_version': safe_unicode(git_ver),
1064 'git_version': safe_unicode(git_ver),
1065 'hg_version': mods.get('mercurial'),
1065 'hg_version': mods.get('mercurial'),
1066 'svn_version': svn_ver,
1066 'svn_version': svn_ver,
1067
1067
1068 'uptime': _uptime,
1068 'uptime': _uptime,
1069 'boot_time': _boot_time,
1069 'boot_time': _boot_time,
1070 'load': _load,
1070 'load': _load,
1071 'cpu': _cpu,
1071 'cpu': _cpu,
1072 'memory': _memory,
1072 'memory': _memory,
1073 'disk': _disk,
1073 'disk': _disk,
1074 'disk_archive': _disk_archive,
1074 'disk_archive': _disk_archive,
1075 'disk_gist': _disk_gist,
1075 'disk_gist': _disk_gist,
1076 'disk_index': _disk_index,
1076 'disk_index': _disk_index,
1077 }
1077 }
1078 return info
1078 return info
1079
1079
1080
1080
1081 def _check_rhodecode_hook(hook_path):
1081 def _check_rhodecode_hook(hook_path):
1082 """
1082 """
1083 Check if the hook was created by RhodeCode
1083 Check if the hook was created by RhodeCode
1084 """
1084 """
1085 if not os.path.exists(hook_path):
1085 if not os.path.exists(hook_path):
1086 return True
1086 return True
1087
1087
1088 log.debug('hook exists, checking if it is from rhodecode')
1088 log.debug('hook exists, checking if it is from rhodecode')
1089 hook_content = _read_hook(hook_path)
1089 hook_content = _read_hook(hook_path)
1090 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
1090 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
1091 if matches:
1091 if matches:
1092 try:
1092 try:
1093 version = matches.groups()[0]
1093 version = matches.groups()[0]
1094 log.debug('got %s, it is rhodecode', version)
1094 log.debug('got %s, it is rhodecode', version)
1095 return True
1095 return True
1096 except Exception:
1096 except Exception:
1097 log.exception("Exception while reading the hook version.")
1097 log.exception("Exception while reading the hook version.")
1098
1098
1099 return False
1099 return False
1100
1100
1101
1101
1102 def _read_hook(hook_path):
1102 def _read_hook(hook_path):
1103 with open(hook_path, 'rb') as f:
1103 with open(hook_path, 'rb') as f:
1104 content = f.read()
1104 content = f.read()
1105 return content
1105 return content
General Comments 0
You need to be logged in to leave comments. Login now