repo-mapper: make it more resilient to errors; it's better that it executes and skips certain repositories than that it crashes the whole mapper.
milka
r4547:dc47f7cc default
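In essence, the change teaches repo_scan() to treat a broken repository the same way it already treats an unreadable path: log the failure and move on, so one bad repository no longer aborts the whole filesystem mapping. A condensed sketch of the new control flow (simplified from the diff below; the surrounding setup is elided):

    for name, path in get_filesystem_repos(repos_path, recursive=True):
        try:
            backend = get_backend(path[0])
            repos[name] = backend(path[1], config=config,
                                  with_wire={"cache": False})
        except OSError:
            continue  # unreadable path: skip, as before
        except RepositoryError:
            # new in this commit: log and skip instead of crashing the mapper
            log.exception('Failed to create a repo')
            continue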
@@ -1,1020 +1,1024 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Scm model for RhodeCode
"""

import os.path
import traceback
import logging
import cStringIO

from sqlalchemy import func
from zope.cachedescriptors.property import Lazy as LazyProperty

import rhodecode
from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib import helpers as h, rc_cache
from rhodecode.lib.auth import (
    HasRepoPermissionAny, HasRepoGroupPermissionAny,
    HasUserGroupPermissionAny)
from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
from rhodecode.lib import hooks_utils
from rhodecode.lib.utils import (
    get_filesystem_repos, make_db_config)
from rhodecode.lib.utils2 import (safe_str, safe_unicode)
from rhodecode.lib.system_info import get_system_info
from rhodecode.model import BaseModel
from rhodecode.model.db import (
    or_, false,
    Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
    PullRequest, FileStore)
from rhodecode.model.settings import VcsSettingsModel
from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl

log = logging.getLogger(__name__)


class UserTemp(object):
    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)


class RepoTemp(object):
    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)


class SimpleCachedRepoList(object):
    """
    Lighter version of iteration over repos, without the scm initialisation,
    and with cache usage
    """
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')
        if not perm_set:
            perm_set = ['repository.read', 'repository.write',
                        'repository.admin']
        self.perm_set = perm_set

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbr in self.db_repo_list:
            # check permission at this level
            has_perm = HasRepoPermissionAny(*self.perm_set)(
                dbr.repo_name, 'SimpleCachedRepoList check')
            if not has_perm:
                continue

            tmp_d = {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
            }
            yield tmp_d


class _PermCheckIterator(object):

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Creates an iterator from the given list of objects, additionally
        checking permissions on them against the perm_set

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker(*self.perm_set)
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for db_obj in self.obj_list:
            # check permission at this level
            # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
            name = db_obj.__dict__.get(self.obj_attr, None)
            if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
                continue

            yield db_obj


class RepoList(_PermCheckIterator):

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['repository.read', 'repository.write', 'repository.admin']

        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='_repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)


class RepoGroupList(_PermCheckIterator):

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['group.read', 'group.write', 'group.admin']

        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='_group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)


class UserGroupList(_PermCheckIterator):

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']

        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)

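The _PermCheckIterator subclasses above filter database objects lazily against the calling user's permissions. A minimal usage sketch (illustrative only, not part of this file; it assumes an authenticated request context so HasRepoPermissionAny can resolve the current user):

    all_repos = Repository.query().all()
    readable = RepoList(all_repos, perm_set=['repository.read'])
    for repo in readable:
        # only repositories the current user may read are yielded
        print(repo.repo_name)
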
class ScmModel(BaseModel):
    """
    Generic Scm Model
    """

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()

    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects

        :param repos_path: path to directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name needs to be decomposed and put back together using the /
            # since this is the internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    backend = get_backend(path[0])
                    repos[name] = backend(path[1], config=config,
                                          with_wire={"cache": False})
            except OSError:
                continue
+            except RepositoryError:
+                log.exception('Failed to create a repo')
+                continue
+
        log.debug('found %s paths with repositories', len(repos))
        return repos

    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db, and for each repo create its
        backend instance and fill that backend with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repositories
        """
        if all_repos is None:
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter

    def get_repo_groups(self, all_groups=None):
        if all_groups is None:
            all_groups = RepoGroup.query()\
                .filter(RepoGroup.group_parent_id == None).all()
        return [x for x in RepoGroupList(all_groups)]

    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)

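mark_for_invalidation() is the cache choke point of this module: every write path below calls it after committing, so readers see fresh objects. A hypothetical use (the repository name is a placeholder):

    model = ScmModel()
    model.mark_for_invalidation('some-repo')               # flag caches as stale
    model.mark_for_invalidation('some-repo', delete=True)  # also drop the keys and
                                                           # purge the dogpile namespace
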
    def toggle_following_repo(self, follow_repo_id, user_id):

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def toggle_following_user(self, follow_user_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def get_followers(self, repo):
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()

    def get_forks(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()

    def get_pull_requests(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.target_repo == repo)\
            .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()

    def get_artifacts(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(FileStore)\
            .filter(FileStore.repo == repo)\
            .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()

    def mark_as_fork(self, repo, fork, user):
        repo = self._get_repo(repo)
        fork = self._get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")

        if fork and repo.repo_type != fork.repo_type:
            raise RepositoryError(
                "Cannot set repository as fork of repository with other type")

        repo.fork = fork
        self.sa.add(repo)
        return repo

    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is because these tasks can be executed via the scheduler
            # without proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.fetch(remote_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.push_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is because these tasks can be executed via the scheduler
            # without proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        try:
            repo.push(remote_uri)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits changes

        :param repo: SCM instance

        """
        user = self._get_user(user)

        # decoding here will ensure we have properly encoded values
        # in any other case this will throw exceptions and deny the commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author need to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches; if the commit fails we want a fresh object too
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
        return tip

    def _sanitize_path(self, f_path):
        if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
            raise NonRelativePathError('%s is not a relative path' % f_path)
        if f_path:
            f_path = os.path.normpath(f_path)
        return f_path

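_sanitize_path() rejects absolute paths and parent-directory traversal before any node is written; everything else is normalised. A few illustrative inputs, read directly from the checks above (not part of this file):

    model = ScmModel()
    model._sanitize_path('docs/readme.rst')         # -> 'docs/readme.rst'
    model._sanitize_path('/etc/passwd')             # raises NonRelativePathError
    model._sanitize_path('./readme.rst')            # raises NonRelativePathError
    model._sanitize_path('a/../../etc/passwd')      # raises NonRelativePathError ('../' present)
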
    def get_dirnode_metadata(self, request, commit, dir_node):
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # we skip non-file nodes
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data

    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        recursively walk the root dir and return a set of all paths found there,
        based on the repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with description
        :param extended_info: show additional info such as md5, binary, size etc
        :param content: add nodes content to the return data
        :param max_file_bytes: will not return file contents over this limit

        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):

                for f in files:
                    _content = None
                    _data = f_name = f.unicode_path

                    if not flat:
                        _data = {
                            "name": h.escape(f_name),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            over_size_limit = (max_file_bytes is not None
                                               and f.size > max_file_bytes)
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)

                for d in dirs:
                    _data = d_name = d.unicode_path
                    if not flat:
                        _data = {
                            "name": h.escape(d_name),
                            "type": "dir",
                        }
                        if extended_info:
                            _data.update({
                                "md5": None,
                                "binary": None,
                                "size": None,
                                "extension": None,
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_nodes")
            raise

        return _dirs, _files

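A hypothetical call showing what the flat/extended_info switches change (repository name and commit id are placeholders, not from this file):

    dirs, files = ScmModel().get_nodes(
        'some-repo', commit_id='deadbeef', root_path='/',
        flat=False, extended_info=True)
    # files is a list of dicts: {'name': ..., 'type': 'file', 'md5': ...,
    #                            'binary': ..., 'size': ..., ...}
    # with flat=True (the default) both lists hold plain path strings instead
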
    def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
        """
        Generate files for quick filter in files view
        """

        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):

                for f in files:

                    _data = {
                        "name": h.escape(f.unicode_path),
                        "type": "file",
                    }

                    _files.append(_data)

                for d in dirs:

                    _data = {
                        "name": h.escape(d.unicode_path),
                        "type": "dir",
                    }

                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_quick_filter_nodes")
            raise

        return _dirs, _files

    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        retrieve single node from commit
        """
        try:

            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.unicode_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

                if cache:
                    md5 = file_node.md5
                    is_binary = file_node.is_binary
                    size = file_node.size
                else:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                file_data.update({
                    "md5": md5,
                    "binary": is_binary,
                    "size": size,
                })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_unicode(file_node.content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not is_binary and not over_size_limit:
                    full_content = safe_unicode(_content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data

    def get_fts_data(self, repo_name, commit_id, root_path='/'):
        """
        Fetch node tree for usage in full text search
        """

        tree_info = list()

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):

                for f in files:
                    is_binary, md5, size, _content = f.metadata_uncached()
                    _data = {
                        "name": f.unicode_path,
                        "md5": md5,
                        "extension": f.extension,
                        "binary": is_binary,
                        "size": size
                    }

                    tree_info.append(_data)

        except RepositoryError:
            log.exception("Exception in get_fts_data")
            raise

        return tree_info

    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty, then it's the
            initial commit
        :param author: author of commit, can be different than the committer,
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: the newly committed commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will ensure we have properly encoded values
            # in any other case this will throw exceptions and deny the commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip

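The nodes mapping documented above is keyed by file path. A hypothetical call (user, repo and contents are placeholders, not from this file):

    tip = ScmModel().create_nodes(
        user=some_user, repo=db_repo, message=u'add docs',
        nodes={
            'docs/index.rst': {'content': 'Welcome\n'},
            'docs/usage.rst': {'content': 'Usage\n'},
        })
    # with parent_commit=None an EmptyCommit is assumed, i.e. this becomes
    # the repository's initial commit
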
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitize
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficiently, needs vcs lib changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches; if the commit fails we want a fresh object too
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])

        return tip

    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes from `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty, then it's the
            initial commit
        :param author: author of commit, can be different than the committer,
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty, but for compatibility it allows the same
            # dict structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip

    def strip(self, repo, commit_id, branch):
        scm_instance = repo.scm_instance(cache=False)
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    @classmethod
    def backend_landing_ref(cls, repo_type):
        """
        Return a default landing ref based on a repository type.
        """

        landing_ref = {
            'hg': ('branch:default', 'default'),
            'git': ('branch:master', 'master'),
            'svn': ('rev:tip', 'latest tip'),
            'default': ('rev:tip', 'latest tip'),
        }

        return landing_ref.get(repo_type) or landing_ref['default']

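The mapping makes the fallback explicit: any unrecognised backend name falls through to the 'default' entry. For instance (read directly from the dict above):

    ScmModel.backend_landing_ref('hg')   # -> ('branch:default', 'default')
    ScmModel.backend_landing_ref('git')  # -> ('branch:master', 'master')
    ScmModel.backend_landing_ref('bzr')  # unknown -> ('rev:tip', 'latest tip')
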
    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select options with tags, branches and bookmarks (for hg only),
        grouped by type

        :param repo:
        """
        _ = translator
        repo = self._get_repo(repo)

        if repo:
            repo_type = repo.repo_type
        else:
            repo_type = 'default'

        default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)

        default_ref_options = [
            [default_landing_ref, landing_ref_lbl]
        ]
        default_choices = [
            default_landing_ref
        ]

        if not repo:
            return default_choices, default_ref_options

        repo = repo.scm_instance()

        ref_options = [('rev:tip', 'latest tip')]
        choices = ['rev:tip']

        # branches
        branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
        if not branch_group:
            # new repo, or maybe one without a branch?
            branch_group = default_ref_options

        branches_group = (branch_group, _("Branches"))
        ref_options.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        # bookmarks for HG
        if repo.alias == 'hg':
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            ref_options.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        # tags
        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        ref_options.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, ref_options

    def get_server_info(self, environ=None):
        server_info = get_system_info(environ)
        return server_info