auth: use a single instance of the auth checker to speed up permission checks by not creating a new checker instance on every iteration.
marcink
r4142:79ad8339 default
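In outline, the change hoists construction of the permission checker out of `_PermCheckIterator.__iter__` and into `__init__`: the checker class (e.g. `HasRepoPermissionAny`) is instantiated once with the permission set, and every subsequent pass over the list reuses that instance. A minimal sketch of the pattern, with a hypothetical `PermChecker` standing in for the real checker classes:

    # Simplified sketch of the pattern in this commit; PermChecker is a
    # hypothetical stand-in for checkers like HasRepoPermissionAny.
    class PermChecker(object):
        def __init__(self, *perm_set):
            self.perm_set = perm_set  # construction cost is paid once here

        def __call__(self, name, context):
            # a real checker would consult the user's permission tree here
            return True

    class AllowedObjects(object):
        def __init__(self, objs, perm_set):
            self.objs = objs
            # after the change: build the checker once, up front ...
            self.perm_checker = PermChecker(*perm_set)

        def __iter__(self):
            # ... instead of rebuilding it here on every iteration pass:
            #   checker = PermChecker(*self.perm_set)
            for obj in self.objs:
                if self.perm_checker(obj.name, self.__class__.__name__):
                    yield obj

The saving per call is small, but these iterators can walk large lists of repositories, repo groups and user groups, so the constant setup cost is no longer paid on every pass over the list.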
@@ -1,1021 +1,1020 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Scm model for RhodeCode
"""

import os.path
import traceback
import logging
import cStringIO

from sqlalchemy import func
from zope.cachedescriptors.property import Lazy as LazyProperty

import rhodecode
from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib import helpers as h, rc_cache
from rhodecode.lib.auth import (
    HasRepoPermissionAny, HasRepoGroupPermissionAny,
    HasUserGroupPermissionAny)
from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
from rhodecode.lib import hooks_utils
from rhodecode.lib.utils import (
    get_filesystem_repos, make_db_config)
from rhodecode.lib.utils2 import (safe_str, safe_unicode)
from rhodecode.lib.system_info import get_system_info
from rhodecode.model import BaseModel
from rhodecode.model.db import (
    or_, false,
    Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
    PullRequest, FileStore)
from rhodecode.model.settings import VcsSettingsModel
from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl

log = logging.getLogger(__name__)


class UserTemp(object):
    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)


class RepoTemp(object):
    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)


class SimpleCachedRepoList(object):
    """
    Lighter version of iteration of repos without the scm initialisation,
    and with cache usage
    """
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')
        if not perm_set:
            perm_set = ['repository.read', 'repository.write',
                        'repository.admin']
        self.perm_set = perm_set

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbr in self.db_repo_list:
            # check permission at this level
            has_perm = HasRepoPermissionAny(*self.perm_set)(
                dbr.repo_name, 'SimpleCachedRepoList check')
            if not has_perm:
                continue

            tmp_d = {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
            }
            yield tmp_d


class _PermCheckIterator(object):

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Creates an iterator from the given list of objects, additionally
        checking permissions for each of them against the perm_set

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
-        self.perm_checker = perm_checker
+        self.perm_checker = perm_checker(*self.perm_set)
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
-        checker = self.perm_checker(*self.perm_set)
        for db_obj in self.obj_list:
            # check permission at this level
            name = getattr(db_obj, self.obj_attr, None)
-            if not checker(name, self.__class__.__name__, **self.extra_kwargs):
+            if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
                continue

            yield db_obj


class RepoList(_PermCheckIterator):

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = [
                'repository.read', 'repository.write', 'repository.admin']

        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)


class RepoGroupList(_PermCheckIterator):

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['group.read', 'group.write', 'group.admin']

        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)


class UserGroupList(_PermCheckIterator):

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']

        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)


class ScmModel(BaseModel):
    """
    Generic Scm Model
    """

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()

    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects

        :param repos_path: path to directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name needs to be decomposed and put back together using the /
            # since this is the internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    backend = get_backend(path[0])
                    repos[name] = backend(path[1], config=config,
                                          with_wire={"cache": False})
            except OSError:
                continue
        log.debug('found %s paths with repositories', len(repos))
        return repos

    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db and for each repo create its
        backend instance and fill that backend with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repositories
        """
        if all_repos is None:
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter

    def get_repo_groups(self, all_groups=None):
        if all_groups is None:
            all_groups = RepoGroup.query()\
                .filter(RepoGroup.group_parent_id == None).all()
        return [x for x in RepoGroupList(all_groups)]

    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)

    def toggle_following_repo(self, follow_repo_id, user_id):

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def toggle_following_user(self, follow_user_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def get_followers(self, repo):
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()

    def get_forks(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()

    def get_pull_requests(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.target_repo == repo)\
            .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()

    def get_artifacts(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(FileStore)\
            .filter(FileStore.repo == repo)\
            .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()

    def mark_as_fork(self, repo, fork, user):
        repo = self._get_repo(repo)
        fork = self._get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")

        if fork and repo.repo_type != fork.repo_type:
            raise RepositoryError(
                "Cannot set repository as fork of repository with other type")

        repo.fork = fork
        self.sa.add(repo)
        return repo

    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is because these tasks can be executed via the scheduler
            # without proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.fetch(remote_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.push_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is because these tasks can be executed via the scheduler
            # without proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        try:
            repo.push(remote_uri)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits changes

        :param repo: SCM instance

        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author need to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip

    def _sanitize_path(self, f_path):
        if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
            raise NonRelativePathError('%s is not a relative path' % f_path)
        if f_path:
            f_path = os.path.normpath(f_path)
        return f_path

    def get_dirnode_metadata(self, request, commit, dir_node):
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # we skip any node that is not a file
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data

    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        recursive walk in root dir and return a set of all paths in that dir
        based on the repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with description
        :param extended_info: show additional info such as md5, binary, size etc
        :param content: add nodes content to the return data
        :param max_file_bytes: will not return file contents over this limit

        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):

                for f in files:
                    _content = None
                    _data = f_name = f.unicode_path

                    if not flat:
                        _data = {
                            "name": h.escape(f_name),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            over_size_limit = (max_file_bytes is not None
                                               and f.size > max_file_bytes)
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)

                for d in dirs:
                    _data = d_name = d.unicode_path
                    if not flat:
                        _data = {
                            "name": h.escape(d_name),
                            "type": "dir",
                        }
                    if extended_info:
                        _data.update({
                            "md5": None,
                            "binary": None,
                            "size": None,
                            "extension": None,
                        })
                    if content:
                        _data.update({
                            "content": None
                        })
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_nodes")
            raise

        return _dirs, _files

    def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
        """
        Generate files for quick filter in files view
        """

        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):

                for f in files:

                    _data = {
                        "name": h.escape(f.unicode_path),
                        "type": "file",
                    }

                    _files.append(_data)

                for d in dirs:

                    _data = {
                        "name": h.escape(d.unicode_path),
                        "type": "dir",
                    }

                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_quick_filter_nodes")
            raise

        return _dirs, _files

    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        retrieve single node from commit
        """
        try:

            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.unicode_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

            if cache:
                md5 = file_node.md5
                is_binary = file_node.is_binary
                size = file_node.size
            else:
                is_binary, md5, size, _content = file_node.metadata_uncached()

            file_data.update({
                "md5": md5,
                "binary": is_binary,
                "size": size,
            })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_unicode(file_node.content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not is_binary and not over_size_limit:
                    full_content = safe_unicode(_content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data

    def get_fts_data(self, repo_name, commit_id, root_path='/'):
        """
        Fetch node tree for usage in full text search
        """

        tree_info = list()

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):

                for f in files:
                    is_binary, md5, size, _content = f.metadata_uncached()
                    _data = {
                        "name": f.unicode_path,
                        "md5": md5,
                        "extension": f.extension,
                        "binary": is_binary,
                        "size": size
                    }

                    tree_info.append(_data)

        except RepositoryError:
            log.exception("Exception in get_nodes")
            raise

        return tree_info

    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
740 """
739 """
741 Commits given multiple nodes into repo
740 Commits given multiple nodes into repo
742
741
743 :param user: RhodeCode User object or user_id, the commiter
742 :param user: RhodeCode User object or user_id, the commiter
744 :param repo: RhodeCode Repository object
743 :param repo: RhodeCode Repository object
745 :param message: commit message
744 :param message: commit message
746 :param nodes: mapping {filename:{'content':content},...}
745 :param nodes: mapping {filename:{'content':content},...}
747 :param parent_commit: parent commit, can be empty than it's
746 :param parent_commit: parent commit, can be empty than it's
748 initial commit
747 initial commit
749 :param author: author of commit, cna be different that commiter
748 :param author: author of commit, cna be different that commiter
750 only for git
749 only for git
751 :param trigger_push_hook: trigger push hooks
750 :param trigger_push_hook: trigger push hooks
752
751
753 :returns: new commited commit
752 :returns: new commited commit
754 """
753 """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip

    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitize
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficiently, needs vcs lib changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])

        return tip

    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
875 """
874 """
876 Deletes given multiple nodes into `repo`
875 Deletes given multiple nodes into `repo`
877
876
878 :param user: RhodeCode User object or user_id, the committer
877 :param user: RhodeCode User object or user_id, the committer
879 :param repo: RhodeCode Repository object
878 :param repo: RhodeCode Repository object
880 :param message: commit message
879 :param message: commit message
881 :param nodes: mapping {filename:{'content':content},...}
880 :param nodes: mapping {filename:{'content':content},...}
882 :param parent_commit: parent commit, can be empty than it's initial
881 :param parent_commit: parent commit, can be empty than it's initial
883 commit
882 commit
884 :param author: author of commit, cna be different that commiter only
883 :param author: author of commit, cna be different that commiter only
885 for git
884 for git
886 :param trigger_push_hook: trigger push hooks
885 :param trigger_push_hook: trigger push hooks
887
886
888 :returns: new commit after deletion
887 :returns: new commit after deletion
889 """
888 """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatibility it allows the same
            # dict structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip

    def strip(self, repo, commit_id, branch):
        scm_instance = repo.scm_instance(cache=False)
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    @classmethod
    def backend_landing_ref(cls, repo_type):
        """
        Return a default landing ref based on a repository type.
        """

        landing_ref = {
            'hg': ('branch:default', 'default'),
            'git': ('branch:master', 'master'),
            'svn': ('rev:tip', 'latest tip'),
            'default': ('rev:tip', 'latest tip'),
        }

        return landing_ref.get(repo_type) or landing_ref['default']

    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select options with tags, branches and bookmarks (for hg only)
        grouped by type

        :param repo:
        """
        _ = translator
        repo = self._get_repo(repo)

        if repo:
            repo_type = repo.repo_type
        else:
            repo_type = 'default'

        default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)

        default_ref_options = [
            [default_landing_ref, landing_ref_lbl]
        ]
        default_choices = [
            default_landing_ref
        ]

        if not repo:
            return default_choices, default_ref_options

        repo = repo.scm_instance()

        ref_options = [('rev:tip', 'latest tip')]
        choices = ['rev:tip']

        # branches
        branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
        if not branch_group:
            # new repo, or maybe a repo without any branches?
            branch_group = default_ref_options

        branches_group = (branch_group, _("Branches"))
        ref_options.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        # bookmarks for HG
        if repo.alias == 'hg':
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            ref_options.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        # tags
        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        ref_options.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, ref_options

    def get_server_info(self, environ=None):
        server_info = get_system_info(environ)
        return server_info
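For context, these iterators are consumed by simply looping over them; a minimal, hypothetical usage sketch of `RepoList` (the surrounding query setup is an assumption for illustration, not part of this change):

    # Hypothetical usage sketch; only the iteration pattern is taken from
    # this file, the surrounding setup is assumed.
    from rhodecode.model.db import Repository
    from rhodecode.model.scm import RepoList

    all_repos = Repository.query().all()

    # perm_set defaults to read/write/admin; with this commit the
    # HasRepoPermissionAny checker is built once in __init__ and then
    # reused for every repository in the list
    readable = RepoList(all_repos, perm_set=['repository.read'])

    for db_repo in readable:  # yields only repos the current user may read
        print(db_repo.repo_name)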