auth: optimize attribute fetching in AuthList iterators
marcink
r4149:117ca338 default
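This change swaps `getattr()` for a direct instance `__dict__` lookup in `_PermCheckIterator.__iter__`, and points `obj_attr` in the repo and repo-group iterators at the raw column attributes (`_repo_name`, `_group_name`). For illustration only (not part of the commit), a rough standalone timing sketch of the two access paths; `Demo` is a hypothetical plain-Python stand-in for the SQLAlchemy-mapped db objects, where instrumented attribute access is what makes the quoted ~4x difference:

import timeit

class Demo(object):
    def __init__(self):
        self._repo_name = 'group/repo'

demo = Demo()
# full attribute protocol: type lookup, descriptors, MRO walk
print(timeit.timeit(lambda: getattr(demo, '_repo_name', None)))
# plain dict lookup on the instance namespace
print(timeit.timeit(lambda: demo.__dict__.get('_repo_name', None)))
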
@@ -1,1019 +1,1020 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Scm model for RhodeCode
"""

import os.path
import traceback
import logging
import cStringIO

from sqlalchemy import func
from zope.cachedescriptors.property import Lazy as LazyProperty

import rhodecode
from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib import helpers as h, rc_cache
from rhodecode.lib.auth import (
    HasRepoPermissionAny, HasRepoGroupPermissionAny,
    HasUserGroupPermissionAny)
from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
from rhodecode.lib import hooks_utils
from rhodecode.lib.utils import (
    get_filesystem_repos, make_db_config)
from rhodecode.lib.utils2 import (safe_str, safe_unicode)
from rhodecode.lib.system_info import get_system_info
from rhodecode.model import BaseModel
from rhodecode.model.db import (
    or_, false,
    Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
    PullRequest, FileStore)
from rhodecode.model.settings import VcsSettingsModel
from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl

log = logging.getLogger(__name__)


class UserTemp(object):
    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)


class RepoTemp(object):
    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)


class SimpleCachedRepoList(object):
    """
    Lighter version of repository iteration, without the scm initialisation
    and with cache usage
    """
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')
        if not perm_set:
            perm_set = ['repository.read', 'repository.write',
                        'repository.admin']
        self.perm_set = perm_set

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbr in self.db_repo_list:
            # check permission at this level
            has_perm = HasRepoPermissionAny(*self.perm_set)(
                dbr.repo_name, 'SimpleCachedRepoList check')
            if not has_perm:
                continue

            tmp_d = {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
            }
            yield tmp_d


class _PermCheckIterator(object):

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
117 """
117 """
118 Creates iterator from given list of objects, additionally
118 Creates iterator from given list of objects, additionally
119 checking permission for them from perm_set var
119 checking permission for them from perm_set var
120
120
121 :param obj_list: list of db objects
121 :param obj_list: list of db objects
122 :param obj_attr: attribute of object to pass into perm_checker
122 :param obj_attr: attribute of object to pass into perm_checker
123 :param perm_set: list of permissions to check
123 :param perm_set: list of permissions to check
124 :param perm_checker: callable to check permissions against
124 :param perm_checker: callable to check permissions against
125 """
125 """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker(*self.perm_set)
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for db_obj in self.obj_list:
            # check permission at this level
-            name = getattr(db_obj, self.obj_attr, None)
+            # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
+            name = db_obj.__dict__.get(self.obj_attr, None)
            if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
                continue

            yield db_obj


class RepoList(_PermCheckIterator):

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['repository.read', 'repository.write', 'repository.admin']

        super(RepoList, self).__init__(
            obj_list=db_repo_list,
-            obj_attr='repo_name', perm_set=perm_set,
+            obj_attr='_repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)


class RepoGroupList(_PermCheckIterator):

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['group.read', 'group.write', 'group.admin']

        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
-            obj_attr='group_name', perm_set=perm_set,
+            obj_attr='_group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)


class UserGroupList(_PermCheckIterator):

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']

        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)


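For orientation, a hedged usage sketch of these permission-filtering iterators; `Repository.query()` mirrors the `RepoGroup.query()` call used further down in this file and is otherwise an assumption:

all_repos = Repository.query().all()
readable_repos = list(RepoList(all_repos))   # default read/write/admin perm set
all_groups = RepoGroup.query().all()
admin_groups = list(RepoGroupList(all_groups, perm_set=['group.admin']))
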
class ScmModel(BaseModel):
    """
    Generic Scm Model
    """

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from the database
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()

    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in the given path. This path should not be a
        repository itself. Returns a dictionary of repository objects

        :param repos_path: path to a directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # the name needs to be decomposed and put back together using '/',
            # since this is the internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    backend = get_backend(path[0])
                    repos[name] = backend(path[1], config=config,
                                          with_wire={"cache": False})
            except OSError:
                continue
        log.debug('found %s paths with repositories', len(repos))
        return repos

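An illustrative call (the storage path is a made-up example); keys are the normalized repository names, values are vcs backend instances:

scm = ScmModel()
for repo_name, backend in scm.repo_scan('/srv/rhodecode/repos').items():
    print('%s (%s)' % (repo_name, backend.alias))
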
    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from the db and for each repo create its
        backend instance and fill that backend with information from the database

        :param all_repos: list of repository names as strings,
            gives a specific list of repositories, good for filtering

        :param sort_key: initial sorting of repositories
        """
        if all_repos is None:
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter

    def get_repo_groups(self, all_groups=None):
        if all_groups is None:
            all_groups = RepoGroup.query()\
                .filter(RepoGroup.group_parent_id == None).all()
        return [x for x in RepoGroupList(all_groups)]

    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo as invalid in the database. The `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting a bool
            flag on them, and also purge the caches used by dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)

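Illustrative calls matching the two modes described in the docstring (the repository name is a placeholder):

scm = ScmModel()
scm.mark_for_invalidation('group/repo')               # flag caches as stale
scm.mark_for_invalidation('group/repo', delete=True)  # drop the cache entries too
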
    def toggle_following_repo(self, follow_repo_id, user_id):

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def toggle_following_user(self, follow_user_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def get_followers(self, repo):
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()

    def get_forks(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()

    def get_pull_requests(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.target_repo == repo)\
            .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()

    def get_artifacts(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(FileStore)\
            .filter(FileStore.repo == repo)\
            .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()

    def mark_as_fork(self, repo, fork, user):
        repo = self._get_repo(repo)
        fork = self._get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")

        if fork and repo.repo_type != fork.repo_type:
            raise RepositoryError(
                "Cannot set repository as fork of repository with other type")

        repo.fork = fork
        self.sa.add(repo)
        return repo

    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is because these tasks can be executed via the scheduler
            # without proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.fetch(remote_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.push_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a push uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is because these tasks can be executed via the scheduler
            # without proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        try:
            repo.push(remote_uri)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits changes

        :param repo: SCM instance

        """
        user = self._get_user(user)

        # decoding here will ensure that we have properly encoded values
        # in any other case this will throw exceptions and deny the commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author need to be unicode
        # the proper backend should then translate that into the required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches; if the commit fails we want a fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip

    def _sanitize_path(self, f_path):
        if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
            raise NonRelativePathError('%s is not a relative path' % f_path)
        if f_path:
            f_path = os.path.normpath(f_path)
        return f_path

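The checks above translate to, for example (illustrative inputs):

model = ScmModel()
model._sanitize_path('docs/readme.rst')    # -> 'docs/readme.rst'
model._sanitize_path('docs//readme.rst')   # -> 'docs/readme.rst' via normpath
model._sanitize_path('/etc/passwd')        # raises NonRelativePathError
model._sanitize_path('a/../../escape')     # raises NonRelativePathError
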
    def get_dirnode_metadata(self, request, commit, dir_node):
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # we skip non-file nodes
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data

    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
514 """
515 """
515 recursive walk in root dir and return a set of all path in that dir
516 recursive walk in root dir and return a set of all path in that dir
516 based on repository walk function
517 based on repository walk function
517
518
518 :param repo_name: name of repository
519 :param repo_name: name of repository
519 :param commit_id: commit id for which to list nodes
520 :param commit_id: commit id for which to list nodes
520 :param root_path: root path to list
521 :param root_path: root path to list
521 :param flat: return as a list, if False returns a dict with description
522 :param flat: return as a list, if False returns a dict with description
522 :param extended_info: show additional info such as md5, binary, size etc
523 :param extended_info: show additional info such as md5, binary, size etc
523 :param content: add nodes content to the return data
524 :param content: add nodes content to the return data
524 :param max_file_bytes: will not return file contents over this limit
525 :param max_file_bytes: will not return file contents over this limit
525
526
526 """
527 """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):

                for f in files:
                    _content = None
                    _data = f_name = f.unicode_path

                    if not flat:
                        _data = {
                            "name": h.escape(f_name),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            over_size_limit = (max_file_bytes is not None
                                               and f.size > max_file_bytes)
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)

                for d in dirs:
                    _data = d_name = d.unicode_path
                    if not flat:
                        _data = {
                            "name": h.escape(d_name),
                            "type": "dir",
                        }
                    if extended_info:
                        _data.update({
                            "md5": None,
                            "binary": None,
                            "size": None,
                            "extension": None,
                        })
                    if content:
                        _data.update({
                            "content": None
                        })
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_nodes")
            raise

        return _dirs, _files

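An illustrative call (repository name and commit id are placeholders):

dirs, files = ScmModel().get_nodes(
    'group/repo', commit_id='tip',
    flat=False, extended_info=True, max_file_bytes=1024 * 1024)
# files -> [{'name': ..., 'type': 'file', 'md5': ..., 'size': ..., ...}, ...]
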
    def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
        """
        Generate files for quick filter in files view
        """

        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):

                for f in files:

                    _data = {
                        "name": h.escape(f.unicode_path),
                        "type": "file",
                    }

                    _files.append(_data)

                for d in dirs:

                    _data = {
                        "name": h.escape(d.unicode_path),
                        "type": "dir",
                    }

                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_quick_filter_nodes")
            raise

        return _dirs, _files

    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
629 """
630 """
630 retrieve single node from commit
631 retrieve single node from commit
631 """
632 """
        try:

            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.unicode_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

            if cache:
                md5 = file_node.md5
                is_binary = file_node.is_binary
                size = file_node.size
            else:
                is_binary, md5, size, _content = file_node.metadata_uncached()

            file_data.update({
                "md5": md5,
                "binary": is_binary,
                "size": size,
            })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_unicode(file_node.content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not is_binary and not over_size_limit:
                    full_content = safe_unicode(_content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data

    def get_fts_data(self, repo_name, commit_id, root_path='/'):
        """
        Fetch node tree for usage in full text search
        """

        tree_info = list()

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):

                for f in files:
                    is_binary, md5, size, _content = f.metadata_uncached()
                    _data = {
                        "name": f.unicode_path,
                        "md5": md5,
                        "extension": f.extension,
                        "binary": is_binary,
                        "size": size
                    }

                    tree_info.append(_data)

        except RepositoryError:
            log.exception("Exception in get_fts_data")
            raise

        return tree_info

    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
738 """
739 """
739 Commits given multiple nodes into repo
740 Commits given multiple nodes into repo
740
741
741 :param user: RhodeCode User object or user_id, the commiter
742 :param user: RhodeCode User object or user_id, the commiter
742 :param repo: RhodeCode Repository object
743 :param repo: RhodeCode Repository object
743 :param message: commit message
744 :param message: commit message
744 :param nodes: mapping {filename:{'content':content},...}
745 :param nodes: mapping {filename:{'content':content},...}
745 :param parent_commit: parent commit, can be empty than it's
746 :param parent_commit: parent commit, can be empty than it's
746 initial commit
747 initial commit
747 :param author: author of commit, cna be different that commiter
748 :param author: author of commit, cna be different that commiter
748 only for git
749 only for git
749 :param trigger_push_hook: trigger push hooks
750 :param trigger_push_hook: trigger push hooks
750
751
751 :returns: new commited commit
752 :returns: new commited commit
752 """
753 """
753
754
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will ensure that we have properly encoded values
            # in any other case this will throw exceptions and deny the commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip

    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one; also sanitize
            # the path for any hacks around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficiently, needs vcs lib changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches; if the commit fails we want a fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])

        return tip

    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
873 """
874 """
874 Deletes given multiple nodes into `repo`
875 Deletes given multiple nodes into `repo`
875
876
876 :param user: RhodeCode User object or user_id, the committer
877 :param user: RhodeCode User object or user_id, the committer
877 :param repo: RhodeCode Repository object
878 :param repo: RhodeCode Repository object
878 :param message: commit message
879 :param message: commit message
879 :param nodes: mapping {filename:{'content':content},...}
880 :param nodes: mapping {filename:{'content':content},...}
880 :param parent_commit: parent commit, can be empty than it's initial
881 :param parent_commit: parent commit, can be empty than it's initial
881 commit
882 commit
882 :param author: author of commit, cna be different that commiter only
883 :param author: author of commit, cna be different that commiter only
883 for git
884 for git
884 :param trigger_push_hook: trigger push hooks
885 :param trigger_push_hook: trigger push hooks
885
886
886 :returns: new commit after deletion
887 :returns: new commit after deletion
887 """
888 """
888
889
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty, but for compatibility it allows the same
            # dict structure as create_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip

    def strip(self, repo, commit_id, branch):
        scm_instance = repo.scm_instance(cache=False)
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    @classmethod
    def backend_landing_ref(cls, repo_type):
        """
        Return a default landing ref based on a repository type.
        """

        landing_ref = {
            'hg': ('branch:default', 'default'),
            'git': ('branch:master', 'master'),
            'svn': ('rev:tip', 'latest tip'),
            'default': ('rev:tip', 'latest tip'),
        }

        return landing_ref.get(repo_type) or landing_ref['default']

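The mapping above resolves as, for example:

ScmModel.backend_landing_ref('hg')       # ('branch:default', 'default')
ScmModel.backend_landing_ref('git')      # ('branch:master', 'master')
ScmModel.backend_landing_ref('unknown')  # ('rev:tip', 'latest tip') fallback
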
    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select options with tags, branches and bookmarks (bookmarks
        for hg only), grouped by type

        :param repo:
        """
        _ = translator
        repo = self._get_repo(repo)

        if repo:
            repo_type = repo.repo_type
        else:
            repo_type = 'default'

        default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)

        default_ref_options = [
            [default_landing_ref, landing_ref_lbl]
        ]
        default_choices = [
            default_landing_ref
        ]

        if not repo:
            return default_choices, default_ref_options

        repo = repo.scm_instance()

        ref_options = [('rev:tip', 'latest tip')]
        choices = ['rev:tip']

        # branches
        branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
        if not branch_group:
            # new repo, or maybe without a branch?
            branch_group = default_ref_options

        branches_group = (branch_group, _("Branches"))
        ref_options.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        # bookmarks for HG
        if repo.alias == 'hg':
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            ref_options.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        # tags
        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        ref_options.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, ref_options

    def get_server_info(self, environ=None):
        server_info = get_system_info(environ)
        return server_info