files: don't pre load heavy attributes to compute file search
super-admin
r5142:5611212e default
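What changed: `get_quick_filter_nodes` walks the commit tree only to collect file and directory paths for the files-view quick filter, but walking from a bare path let every visited file node fall back to its default pre-load set and batch-load heavy attributes the quick filter never reads. The commit resolves the root node first and clears its `default_pre_load` list before walking. A minimal sketch of the pattern, assuming a `commit` object obtained via `scm_instance().get_commit(...)` as elsewhere in this module:

# Before: walking by path pre-loads each file node's default attribute set.
for __, dirs, files in commit.walk(root_path):
    ...

# After: resolve the root node and clear its pre-load list, so the walk
# yields lightweight nodes and only paths are read.
top_node = commit.get_node(root_path)
top_node.default_pre_load = []  # skip batch-loading attributes such as md5/size/is_binary

for __, dirs, files in commit.walk(top_node):
    ...

Compare `get_nodes` below, which keeps `default_pre_load = ['is_binary', 'size']` (plus 'md5' when extended info is requested) because its callers do consume those attributes; `get_fts_data` already used the same empty pre-load approach.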
@@ -1,1041 +1,1044 @@
# Copyright (C) 2010-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Scm model for RhodeCode
"""

import os.path
import traceback
import logging
import io

from sqlalchemy import func
from zope.cachedescriptors.property import Lazy as LazyProperty

import rhodecode
from rhodecode.lib.str_utils import safe_bytes
from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib import helpers as h, rc_cache
from rhodecode.lib.auth import (
    HasRepoPermissionAny, HasRepoGroupPermissionAny,
    HasUserGroupPermissionAny)
from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
from rhodecode.lib import hooks_utils
from rhodecode.lib.utils import (
    get_filesystem_repos, make_db_config)
from rhodecode.lib.str_utils import safe_str
from rhodecode.lib.system_info import get_system_info
from rhodecode.model import BaseModel
from rhodecode.model.db import (
    or_, false,
    Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
    PullRequest, FileStore)
from rhodecode.model.settings import VcsSettingsModel
from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl

log = logging.getLogger(__name__)


class UserTemp(object):
    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)


class RepoTemp(object):
    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)


class SimpleCachedRepoList(object):
    """
    Lighter version of repo iteration, without the scm initialisation
    and with cache usage
    """
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')
        if not perm_set:
            perm_set = ['repository.read', 'repository.write',
                        'repository.admin']
        self.perm_set = perm_set

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<{} ({})>'.format(self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbr in self.db_repo_list:
            # check permission at this level
            has_perm = HasRepoPermissionAny(*self.perm_set)(
                dbr.repo_name, 'SimpleCachedRepoList check')
            if not has_perm:
                continue

            tmp_d = {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
            }
            yield tmp_d


class _PermCheckIterator(object):

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Creates an iterator from the given list of objects, additionally
        checking permissions for them against perm_set

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker(*self.perm_set)
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<{} ({})>'.format(self.__class__.__name__, self.__len__())

    def __iter__(self):
        for db_obj in self.obj_list:
            # check permission at this level
            # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
            name = db_obj.__dict__.get(self.obj_attr, None)
            if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
                continue

            yield db_obj


class RepoList(_PermCheckIterator):

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['repository.read', 'repository.write', 'repository.admin']

        super().__init__(
            obj_list=db_repo_list,
            obj_attr='_repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)


class RepoGroupList(_PermCheckIterator):

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['group.read', 'group.write', 'group.admin']

        super().__init__(
            obj_list=db_repo_group_list,
            obj_attr='_group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)


class UserGroupList(_PermCheckIterator):

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']

        super().__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)


class ScmModel(BaseModel):
    """
    Generic Scm Model
    """

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from the database
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()

    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in the given path. This path should not be a
        repository itself. Returns a dictionary of repository objects

        :param repos_path: path to directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # names need to be decomposed and put back together using /,
            # since this is the internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    backend = get_backend(path[0])
                    repos[name] = backend(path[1], config=config,
                                          with_wire={"cache": False})
            except OSError:
                continue
            except RepositoryError:
                log.exception('Failed to create a repo')
                continue

        log.debug('found %s paths with repositories', len(repos))
        return repos

    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from the db and, for each repo, create its
        backend instance and fill that backend with information from the database

        :param all_repos: list of repository names as strings,
            to give a specific list of repositories, good for filtering

        :param sort_key: initial sorting of repositories
        """
        if all_repos is None:
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter

    def get_repo_groups(self, all_groups=None):
        if all_groups is None:
            all_groups = RepoGroup.query()\
                .filter(RepoGroup.group_parent_id == None).all()
        return [x for x in RepoGroupList(all_groups)]

    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. The `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting a bool
            flag on them, and also purge caches used by dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                cache_namespace_uid = f'cache_repo.{repo_id}'
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)

    def toggle_following_repo(self, follow_repo_id, user_id):

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def toggle_following_user(self, follow_user_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def get_followers(self, repo):
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()

    def get_forks(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()

    def get_pull_requests(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.target_repo == repo)\
            .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()

    def get_artifacts(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(FileStore)\
            .filter(FileStore.repo == repo)\
            .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()

    def mark_as_fork(self, repo, fork, user):
        repo = self._get_repo(repo)
        fork = self._get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")

        if fork and repo.repo_type != fork.repo_type:
            raise RepositoryError(
                "Cannot set repository as fork of repository with other type")

        repo.fork = fork
        self.sa.add(repo)
        return repo

    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls;
            # this is because these tasks can be executed via the scheduler
            # without proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.fetch(remote_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.push_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a push uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls;
            # this is because these tasks can be executed via the scheduler
            # without proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        try:
            repo.push(remote_uri)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content: bytes, f_path: bytes):
        """
        Commits changes
        """
        user = self._get_user(user)

        # message and author need to be unicode;
        # the proper backend should then translate that into the required type
        message = safe_str(message)
        author = safe_str(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches; if the commit fails we want a fresh object too
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
        return tip

    def _sanitize_path(self, f_path: bytes):
        if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
            raise NonRelativePathError(b'%b is not a relative path' % f_path)
        if f_path:
            f_path = os.path.normpath(f_path)
        return f_path

    def get_dirnode_metadata(self, request, commit, dir_node):
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # we skip anything that is not a file
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data

    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        Recursive walk in the root dir; returns a set of all paths in that
        dir, based on the repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list; if False returns a dict with description
        :param extended_info: show additional info such as md5, binary, size etc
        :param content: add nodes content to the return data
        :param max_file_bytes: will not return file contents over this limit

        """
        _files = list()
        _dirs = list()

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')

            # get RootNode, inject pre-load options before walking
            top_node = commit.get_node(root_path)
            extended_info_pre_load = []
            if extended_info:
                extended_info_pre_load += ['md5']
            top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load

            for __, dirs, files in commit.walk(top_node):

                for f in files:
                    _content = None
                    _data = f_name = f.str_path

                    if not flat:
                        _data = {
                            "name": h.escape(f_name),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            over_size_limit = (max_file_bytes is not None
                                               and f.size > max_file_bytes)
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = f.str_content

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)

                for d in dirs:
                    _data = d_name = d.str_path
                    if not flat:
                        _data = {
                            "name": h.escape(d_name),
                            "type": "dir",
                        }
                        if extended_info:
                            _data.update({
                                "md5": "",
                                "binary": False,
                                "size": 0,
                                "extension": "",
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_nodes")
            raise

        return _dirs, _files

    def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
        """
        Generate files for quick filter in files view
        """

        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
-            for __, dirs, files in commit.walk(root_path):
+            top_node = commit.get_node(root_path)
+            top_node.default_pre_load = []
+
+            for __, dirs, files in commit.walk(top_node):
                for f in files:

                    _data = {
                        "name": h.escape(f.str_path),
                        "type": "file",
                    }

                    _files.append(_data)

                for d in dirs:

                    _data = {
                        "name": h.escape(d.str_path),
                        "type": "dir",
                    }

                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_quick_filter_nodes")
            raise

        return _dirs, _files

    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        retrieve single node from commit
        """

        try:

            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.str_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

                if cache:
                    md5 = file_node.md5
                    is_binary = file_node.is_binary
                    size = file_node.size
                else:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                file_data.update({
                    "md5": md5,
                    "binary": is_binary,
                    "size": size,
                })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_str(file_node.content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not is_binary and not over_size_limit:
                    full_content = safe_str(_content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data

    def get_fts_data(self, repo_name, commit_id, root_path='/'):
        """
        Fetch node tree for usage in full text search
        """

        tree_info = list()

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            top_node = commit.get_node(root_path)
            top_node.default_pre_load = []

            for __, dirs, files in commit.walk(top_node):

                for f in files:
                    is_binary, md5, size, _content = f.metadata_uncached()
                    _data = {
                        "name": f.str_path,
                        "md5": md5,
                        "extension": f.extension,
                        "binary": is_binary,
                        "size": size
                    }

                    tree_info.append(_data)

        except RepositoryError:
            log.exception("Exception in get_fts_data")
            raise

        return tree_info

    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits multiple given nodes into the repo

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty; then it's
            the initial commit
        :param author: author of the commit, can be different than the
            committer, only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_str(message)
        committer = user.full_contact
        author = safe_str(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        upload_file_types = (io.BytesIO, io.BufferedRandom)
        processed_nodes = []
        for filename, content_dict in nodes.items():
            if not isinstance(filename, bytes):
                raise ValueError(f'filename key in nodes needs to be bytes, or {upload_file_types}')
            content = content_dict['content']
            if not isinstance(content, upload_file_types + (bytes,)):
                raise ValueError('content key value in nodes needs to be bytes')

        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']

            # decoding here ensures we have properly encoded values;
            # in any other case this will throw exceptions and deny the commit

            if isinstance(content, bytes):
                pass
            elif isinstance(content, upload_file_types):
                content = content.read()
            else:
                raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
            processed_nodes.append((f_path, content))

        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip

    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_str(message)
        committer = user.full_contact
        author = safe_str(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one; also sanitize
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficiently, needs vcs lib changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches; if the commit fails we want a fresh object too
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])

        return tip

    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes multiple given nodes from `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty; then it's the
            initial commit
        :param author: author of the commit, can be different than the
            committer, only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty, but for compatibility it allows the same
            # dict structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((safe_bytes(f_path), content))

        message = safe_str(message)
        committer = user.full_contact
        author = safe_str(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip

    def strip(self, repo, commit_id, branch):
        scm_instance = repo.scm_instance(cache=False)
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    @classmethod
    def backend_landing_ref(cls, repo_type):
        """
        Return a default landing ref based on a repository type.
        """

        landing_ref = {
            'hg': ('branch:default', 'default'),
            'git': ('branch:master', 'master'),
            'svn': ('rev:tip', 'latest tip'),
            'default': ('rev:tip', 'latest tip'),
        }

        return landing_ref.get(repo_type) or landing_ref['default']

    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select options with tags, branches and bookmarks
        (bookmarks for hg only), grouped by type

        :param repo:
        """
        from rhodecode.lib.vcs.backends.git import GitRepository

        _ = translator
        repo = self._get_repo(repo)

        if repo:
            repo_type = repo.repo_type
        else:
            repo_type = 'default'

        default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)

        default_ref_options = [
            [default_landing_ref, landing_ref_lbl]
        ]
        default_choices = [
            default_landing_ref
        ]

        if not repo:
            # presented at NEW repo creation
            return default_choices, default_ref_options

        repo = repo.scm_instance()

        ref_options = [(default_landing_ref, landing_ref_lbl)]
        choices = [default_landing_ref]

        # branches
        branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
        if not branch_group:
            # new repo, or maybe one without a branch?
            branch_group = default_ref_options

        branches_group = (branch_group, _("Branches"))
        ref_options.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        # bookmarks for HG
        if repo.alias == 'hg':
            bookmarks_group = (
                [(f'book:{safe_str(b)}', safe_str(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            ref_options.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        # tags
        tags_group = (
            [(f'tag:{safe_str(t)}', safe_str(t))
             for t in repo.tags],
            _("Tags"))
        ref_options.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, ref_options

    def get_server_info(self, environ=None):
        server_info = get_system_info(environ)
        return server_info
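For context, a hypothetical call site of the touched method; the repo name and commit id below are placeholders, and `ScmModel()` is assumed to be constructible with the default session, as elsewhere in the codebase:

from rhodecode.model.scm import ScmModel

# Fetch lightweight {'name': ..., 'type': 'file'|'dir'} entries for the
# files-view quick filter; after this change the underlying walk no longer
# pre-loads heavy per-node attributes.
dirs, files = ScmModel().get_quick_filter_nodes('my-repo', 'tip')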