##// END OF EJS Templates
search-api: don't escape files exposed by the full-text-search API....
marcink -
r3489:b852ad38 default
parent child Browse files
Show More
@@ -1,941 +1,939 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import traceback
26 import traceback
27 import logging
27 import logging
28 import cStringIO
28 import cStringIO
29
29
30 from sqlalchemy import func
30 from sqlalchemy import func
31 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
32
32
33 import rhodecode
33 import rhodecode
34 from rhodecode.lib.vcs import get_backend
34 from rhodecode.lib.vcs import get_backend
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.nodes import FileNode
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 from rhodecode.lib import helpers as h, rc_cache
38 from rhodecode.lib import helpers as h, rc_cache
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 HasUserGroupPermissionAny)
41 HasUserGroupPermissionAny)
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 from rhodecode.lib import hooks_utils
43 from rhodecode.lib import hooks_utils
44 from rhodecode.lib.utils import (
44 from rhodecode.lib.utils import (
45 get_filesystem_repos, make_db_config)
45 get_filesystem_repos, make_db_config)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 from rhodecode.lib.system_info import get_system_info
47 from rhodecode.lib.system_info import get_system_info
48 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 PullRequest)
51 PullRequest)
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
class UserTemp(object):
    """Lightweight stand-in object carrying only a user id."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        cls_name = self.__class__.__name__
        return "<%s('id:%s')>" % (cls_name, self.user_id)
64
64
65
65
class RepoTemp(object):
    """Lightweight stand-in object carrying only a repository id."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        cls_name = self.__class__.__name__
        return "<%s('id:%s')>" % (cls_name, self.repo_id)
72
72
73
73
class SimpleCachedRepoList(object):
    """
    Lighter iteration over repositories, without scm initialisation and
    with cache usage; entries the current user cannot read are skipped.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        has_permission = HasRepoPermissionAny(*self.perm_set)
        for db_repo in self.db_repo_list:
            # permission is checked per-repository at iteration time
            if not has_permission(db_repo.repo_name, 'SimpleCachedRepoList check'):
                continue

            yield {
                'name': db_repo.repo_name,
                'dbrepo': db_repo.get_dict(),
                'dbrepo_fork': db_repo.fork.get_dict() if db_repo.fork else {},
            }
109
109
110
110
111 class _PermCheckIterator(object):
111 class _PermCheckIterator(object):
112
112
113 def __init__(
113 def __init__(
114 self, obj_list, obj_attr, perm_set, perm_checker,
114 self, obj_list, obj_attr, perm_set, perm_checker,
115 extra_kwargs=None):
115 extra_kwargs=None):
116 """
116 """
117 Creates iterator from given list of objects, additionally
117 Creates iterator from given list of objects, additionally
118 checking permission for them from perm_set var
118 checking permission for them from perm_set var
119
119
120 :param obj_list: list of db objects
120 :param obj_list: list of db objects
121 :param obj_attr: attribute of object to pass into perm_checker
121 :param obj_attr: attribute of object to pass into perm_checker
122 :param perm_set: list of permissions to check
122 :param perm_set: list of permissions to check
123 :param perm_checker: callable to check permissions against
123 :param perm_checker: callable to check permissions against
124 """
124 """
125 self.obj_list = obj_list
125 self.obj_list = obj_list
126 self.obj_attr = obj_attr
126 self.obj_attr = obj_attr
127 self.perm_set = perm_set
127 self.perm_set = perm_set
128 self.perm_checker = perm_checker
128 self.perm_checker = perm_checker
129 self.extra_kwargs = extra_kwargs or {}
129 self.extra_kwargs = extra_kwargs or {}
130
130
131 def __len__(self):
131 def __len__(self):
132 return len(self.obj_list)
132 return len(self.obj_list)
133
133
134 def __repr__(self):
134 def __repr__(self):
135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136
136
137 def __iter__(self):
137 def __iter__(self):
138 checker = self.perm_checker(*self.perm_set)
138 checker = self.perm_checker(*self.perm_set)
139 for db_obj in self.obj_list:
139 for db_obj in self.obj_list:
140 # check permission at this level
140 # check permission at this level
141 name = getattr(db_obj, self.obj_attr, None)
141 name = getattr(db_obj, self.obj_attr, None)
142 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
142 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
143 continue
143 continue
144
144
145 yield db_obj
145 yield db_obj
146
146
147
147
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over Repository db objects."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name',
            perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
160
160
161
161
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over RepoGroup db objects."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']

        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name',
            perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
173
173
174
174
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over UserGroup db objects."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']

        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name',
            perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
186
186
187
187
188 class ScmModel(BaseModel):
188 class ScmModel(BaseModel):
189 """
189 """
190 Generic Scm Model
190 Generic Scm Model
191 """
191 """
192
192
@LazyProperty
def repos_path(self):
    """
    Repositories root path, read (lazily, once) from the database
    settings.
    """
    return VcsSettingsModel(sa=self.sa).get_repos_location()
201
201
def repo_scan(self, repos_path=None):
    """
    List repositories found under the given path, which should not
    itself be a repository. Returns a dict mapping normalized repo name
    to an instantiated vcs backend.

    :param repos_path: path to directory containing repositories;
        defaults to the configured repositories root
    """
    if repos_path is None:
        repos_path = self.repos_path

    log.info('scanning for repositories in %s', repos_path)

    config = make_db_config()
    config.set('extensions', 'largefiles', '')

    found = {}
    for name, path in get_filesystem_repos(repos_path, recursive=True):
        # name need to be decomposed and put back together using the /
        # since this is internal storage separator for rhodecode
        name = Repository.normalize_repo_name(name)

        try:
            if name in found:
                raise RepositoryError('Duplicate repository name %s '
                                      'found in %s' % (name, path))
            elif path[0] in rhodecode.BACKENDS:
                backend_class = get_backend(path[0])
                found[name] = backend_class(path[1], config=config)
        except OSError:
            # unreadable entries are skipped silently
            continue
    log.debug('found %s paths with repositories', len(found))
    return found
235
235
def get_repos(self, all_repos=None, sort_key=None):
    """
    Get all repositories from db and for each repo create it's
    backend instance and fill that backed with information from database

    :param all_repos: explicit list of Repository db objects; when None,
        every repository outside of a group is loaded, sorted by name
    :param sort_key: initial sorting of repositories
    """
    if all_repos is None:
        all_repos = (
            self.sa.query(Repository)
            .filter(Repository.group_id == None)
            .order_by(func.lower(Repository.repo_name))
            .all())
    return SimpleCachedRepoList(
        all_repos, repos_path=self.repos_path, order_by=sort_key)
253
253
def get_repo_groups(self, all_groups=None):
    """
    Return top-level repository groups, filtered to those the current
    user has permission to access.

    :param all_groups: explicit list of RepoGroup db objects; when None,
        all groups without a parent are loaded
    """
    if all_groups is None:
        all_groups = RepoGroup.query()\
            .filter(RepoGroup.group_parent_id == None).all()
    # list() materializes the permission-filtered iterator directly
    # (idiomatic replacement for the identity list comprehension)
    return list(RepoGroupList(all_groups))
259
259
def mark_for_invalidation(self, repo_name, delete=False):
    """
    Mark caches of this repo invalid in the database. `delete` flag
    removes the cache entries

    :param repo_name: the repo_name for which caches should be marked
        invalid, or deleted
    :param delete: delete the entry keys instead of setting bool
        flag on them, and also purge caches used by the dogpile
    """
    repo = Repository.get_by_repo_name(repo_name)

    # silently no-op when the repository does not exist in the db
    if repo:
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=repo.repo_id)
        CacheKey.set_invalidate(invalidation_namespace, delete=delete)

        repo_id = repo.repo_id
        config = repo._config
        # largefiles extension is disabled for this internal refresh,
        # same as in repo_scan
        config.set('extensions', 'largefiles', '')
        # refresh the cached commit data right away
        repo.update_commit_cache(config=config, cs_cache=None)
        if delete:
            # also purge the dogpile cache region for this repository
            cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
            rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
284
284
def toggle_following_repo(self, follow_repo_id, user_id):
    """
    Toggle the follow state: unfollow the repository if the user already
    follows it, otherwise create a new following entry.
    """
    existing = self.sa.query(UserFollowing)\
        .filter(UserFollowing.follows_repo_id == follow_repo_id)\
        .filter(UserFollowing.user_id == user_id).scalar()

    if existing is not None:
        # already following -> remove the entry and stop
        try:
            self.sa.delete(existing)
            return
        except Exception:
            log.error(traceback.format_exc())
            raise

    try:
        following = UserFollowing()
        following.user_id = user_id
        following.follows_repo_id = follow_repo_id
        self.sa.add(following)
    except Exception:
        log.error(traceback.format_exc())
        raise
307
307
def toggle_following_user(self, follow_user_id, user_id):
    """
    Toggle the follow state: unfollow the user if already followed,
    otherwise create a new following entry.
    """
    existing = self.sa.query(UserFollowing)\
        .filter(UserFollowing.follows_user_id == follow_user_id)\
        .filter(UserFollowing.user_id == user_id).scalar()

    if existing is not None:
        # already following -> remove the entry and stop
        try:
            self.sa.delete(existing)
            return
        except Exception:
            log.error(traceback.format_exc())
            raise

    try:
        following = UserFollowing()
        following.user_id = user_id
        following.follows_user_id = follow_user_id
        self.sa.add(following)
    except Exception:
        log.error(traceback.format_exc())
        raise
329
329
def is_following_repo(self, repo_name, user_id, cache=False):
    """Return True when ``user_id`` follows the repository ``repo_name``."""
    repo = self.sa.query(Repository)\
        .filter(Repository.repo_name == repo_name).scalar()

    following = self.sa.query(UserFollowing)\
        .filter(UserFollowing.follows_repository == repo)\
        .filter(UserFollowing.user_id == user_id).scalar()

    return following is not None
339
339
def is_following_user(self, username, user_id, cache=False):
    """Return True when ``user_id`` follows the user ``username``."""
    followed_user = User.get_by_username(username)

    following = self.sa.query(UserFollowing)\
        .filter(UserFollowing.follows_user == followed_user)\
        .filter(UserFollowing.user_id == user_id).scalar()

    return following is not None
348
348
def get_followers(self, repo):
    """Count the users following the given repository."""
    repo = self._get_repo(repo)
    query = self.sa.query(UserFollowing).filter(
        UserFollowing.follows_repository == repo)
    return query.count()
354
354
def get_forks(self, repo):
    """Count the forks of the given repository."""
    repo = self._get_repo(repo)
    query = self.sa.query(Repository).filter(Repository.fork == repo)
    return query.count()
359
359
def get_pull_requests(self, repo):
    """Count the non-closed pull requests targeting the repository."""
    repo = self._get_repo(repo)
    query = self.sa.query(PullRequest)\
        .filter(PullRequest.target_repo == repo)\
        .filter(PullRequest.status != PullRequest.STATUS_CLOSED)
    return query.count()
365
365
def mark_as_fork(self, repo, fork, user):
    """
    Mark ``repo`` as a fork of ``fork``; passing a falsy ``fork``
    clears the fork relation. Returns the updated repository.

    :raises Exception: when repo and fork are the same repository
    :raises RepositoryError: when the repository types differ
    """
    repo = self._get_repo(repo)
    fork = self._get_repo(fork)

    if fork:
        if repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")
        if repo.repo_type != fork.repo_type:
            raise RepositoryError(
                "Cannot set repository as fork of repository with other type")

    repo.fork = fork
    self.sa.add(repo)
    return repo
379
379
def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
    """
    Fetch changes from a remote uri into the given repository.

    :param repo: repository name or db object to pull into
    :param username: user requesting the pull (not used in the visible
        body; kept for interface compatibility with callers)
    :param remote_uri: override uri; falls back to the repo's clone_uri
    :param validate_uri: validate the uri before fetching
    :raises Exception: when no remote uri is configured
    :raises InvalidCloneUrl: when uri validation fails
    """
    dbrepo = self._get_repo(repo)
    remote_uri = remote_uri or dbrepo.clone_uri
    if not remote_uri:
        raise Exception("This repository doesn't have a clone uri")

    repo = dbrepo.scm_instance(cache=False)
    # hooks are disabled for this internal fetch operation
    repo.config.clear_section('hooks')

    try:
        # NOTE(marcink): add extra validation so we skip invalid urls
        # this is due this tasks can be executed via scheduler without
        # proper validation of remote_uri
        if validate_uri:
            config = make_db_config(clear_session=False)
            url_validator(remote_uri, dbrepo.repo_type, config)
    except InvalidCloneUrl:
        raise

    repo_name = dbrepo.repo_name
    try:
        # TODO: we need to make sure those operations call proper hooks !
        repo.fetch(remote_uri)

        # caches must be refreshed after new commits arrive
        self.mark_for_invalidation(repo_name)
    except Exception:
        log.error(traceback.format_exc())
        raise
408
408
def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
    """
    Push the repository's changes to a remote uri.

    :param repo: repository name or db object to push from
    :param username: user requesting the push (not used in the visible
        body; kept for interface compatibility with callers)
    :param remote_uri: override uri; falls back to the repo's push_uri
    :param validate_uri: validate the uri before pushing
    :raises Exception: when no remote uri is configured
    :raises InvalidCloneUrl: when uri validation fails
    """
    dbrepo = self._get_repo(repo)
    remote_uri = remote_uri or dbrepo.push_uri
    if not remote_uri:
        raise Exception("This repository doesn't have a clone uri")

    repo = dbrepo.scm_instance(cache=False)
    # hooks are disabled for this internal push operation
    repo.config.clear_section('hooks')

    try:
        # NOTE(marcink): add extra validation so we skip invalid urls
        # this is due this tasks can be executed via scheduler without
        # proper validation of remote_uri
        if validate_uri:
            config = make_db_config(clear_session=False)
            url_validator(remote_uri, dbrepo.repo_type, config)
    except InvalidCloneUrl:
        raise

    try:
        repo.push(remote_uri)
    except Exception:
        log.error(traceback.format_exc())
        raise
433
433
def commit_change(self, repo, repo_name, commit, user, author, message,
                  content, f_path):
    """
    Commit a change of a single file on top of the given commit.

    :param repo: SCM instance (vcs backend repository, not a db object)
    :param repo_name: repository name, used for cache invalidation and
        the post-push hook
    :param commit: parent commit on top of which the change is committed
    :param user: db user (or id/name resolvable via _get_user)
    :param author: commit author string
    :param message: commit message
    :param content: new file content
    :param f_path: path of the file to change
    :raises IMCCommitError: when the in-memory commit fails
    :returns: the newly created tip commit
    """
    user = self._get_user(user)

    # decoding here will force that we have proper encoded values
    # in any other case this will throw exceptions and deny commit
    content = safe_str(content)
    path = safe_str(f_path)
    # message and author needs to be unicode
    # proper backend should then translate that into required type
    message = safe_unicode(message)
    author = safe_unicode(author)
    imc = repo.in_memory_commit
    # file mode is preserved from the parent commit's version of the file
    imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
    try:
        # TODO: handle pre-push action !
        tip = imc.commit(
            message=message, author=author, parents=[commit],
            branch=commit.branch)
    except Exception as e:
        log.error(traceback.format_exc())
        raise IMCCommitError(str(e))
    finally:
        # always clear caches, if commit fails we want fresh object also
        self.mark_for_invalidation(repo_name)

    # We trigger the post-push action
    hooks_utils.trigger_post_push_hook(
        username=user.username, action='push_local', hook_type='post_push',
        repo_name=repo_name, repo_alias=repo.alias, commit_ids=[tip.raw_id])
    return tip
471
471
472 def _sanitize_path(self, f_path):
472 def _sanitize_path(self, f_path):
473 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
473 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
474 raise NonRelativePathError('%s is not an relative path' % f_path)
474 raise NonRelativePathError('%s is not an relative path' % f_path)
475 if f_path:
475 if f_path:
476 f_path = os.path.normpath(f_path)
476 f_path = os.path.normpath(f_path)
477 return f_path
477 return f_path
478
478
def get_dirnode_metadata(self, request, commit, dir_node):
    """
    Collect display metadata (size, last-commit info, author profile)
    for each file directly inside ``dir_node``.

    Returns an empty list when ``dir_node`` is not a directory.
    """
    if not dir_node.is_dir():
        return []

    entries = []
    for node in dir_node:
        if not node.is_file():
            # skip anything that is not a plain file
            continue

        last_commit = node.last_commit
        last_commit_date = last_commit.date
        entries.append({
            'name': node.name,
            'size': h.format_byte_size_binary(node.size),
            'modified_at': h.format_date(last_commit_date),
            'modified_ts': last_commit_date.isoformat(),
            'revision': last_commit.revision,
            'short_id': last_commit.short_id,
            'message': h.escape(last_commit.message),
            'author': h.escape(last_commit.author),
            'user_profile': h.gravatar_with_user(
                request, last_commit.author),
        })

    return entries
505
505
506 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
506 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
507 extended_info=False, content=False, max_file_bytes=None):
507 extended_info=False, content=False, max_file_bytes=None):
508 """
508 """
509 recursive walk in root dir and return a set of all path in that dir
509 recursive walk in root dir and return a set of all path in that dir
510 based on repository walk function
510 based on repository walk function
511
511
512 :param repo_name: name of repository
512 :param repo_name: name of repository
513 :param commit_id: commit id for which to list nodes
513 :param commit_id: commit id for which to list nodes
514 :param root_path: root path to list
514 :param root_path: root path to list
515 :param flat: return as a list, if False returns a dict with description
515 :param flat: return as a list, if False returns a dict with description
516 :param extended_info: show additional info such as md5, binary, size etc
516 :param extended_info: show additional info such as md5, binary, size etc
517 :param content: add nodes content to the return data
517 :param content: add nodes content to the return data
518 :param max_file_bytes: will not return file contents over this limit
518 :param max_file_bytes: will not return file contents over this limit
519
519
520 """
520 """
521 _files = list()
521 _files = list()
522 _dirs = list()
522 _dirs = list()
523 try:
523 try:
524 _repo = self._get_repo(repo_name)
524 _repo = self._get_repo(repo_name)
525 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
525 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
526 root_path = root_path.lstrip('/')
526 root_path = root_path.lstrip('/')
527 for __, dirs, files in commit.walk(root_path):
527 for __, dirs, files in commit.walk(root_path):
528
528
529 for f in files:
529 for f in files:
530 _content = None
530 _content = None
531 _data = f_name = f.unicode_path
531 _data = f_name = f.unicode_path
532
532
533 if not flat:
533 if not flat:
534 _data = {
534 _data = {
535 "name": h.escape(f_name),
535 "name": h.escape(f_name),
536 "type": "file",
536 "type": "file",
537 }
537 }
538 if extended_info:
538 if extended_info:
539 _data.update({
539 _data.update({
540 "md5": f.md5,
540 "md5": f.md5,
541 "binary": f.is_binary,
541 "binary": f.is_binary,
542 "size": f.size,
542 "size": f.size,
543 "extension": f.extension,
543 "extension": f.extension,
544 "mimetype": f.mimetype,
544 "mimetype": f.mimetype,
545 "lines": f.lines()[0]
545 "lines": f.lines()[0]
546 })
546 })
547
547
548 if content:
548 if content:
549 over_size_limit = (max_file_bytes is not None
549 over_size_limit = (max_file_bytes is not None
550 and f.size > max_file_bytes)
550 and f.size > max_file_bytes)
551 full_content = None
551 full_content = None
552 if not f.is_binary and not over_size_limit:
552 if not f.is_binary and not over_size_limit:
553 full_content = safe_str(f.content)
553 full_content = safe_str(f.content)
554
554
555 _data.update({
555 _data.update({
556 "content": full_content,
556 "content": full_content,
557 })
557 })
558 _files.append(_data)
558 _files.append(_data)
559
559
560 for d in dirs:
560 for d in dirs:
561 _data = d_name = d.unicode_path
561 _data = d_name = d.unicode_path
562 if not flat:
562 if not flat:
563 _data = {
563 _data = {
564 "name": h.escape(d_name),
564 "name": h.escape(d_name),
565 "type": "dir",
565 "type": "dir",
566 }
566 }
567 if extended_info:
567 if extended_info:
568 _data.update({
568 _data.update({
569 "md5": None,
569 "md5": None,
570 "binary": None,
570 "binary": None,
571 "size": None,
571 "size": None,
572 "extension": None,
572 "extension": None,
573 })
573 })
574 if content:
574 if content:
575 _data.update({
575 _data.update({
576 "content": None
576 "content": None
577 })
577 })
578 _dirs.append(_data)
578 _dirs.append(_data)
579 except RepositoryError:
579 except RepositoryError:
580 log.exception("Exception in get_nodes")
580 log.exception("Exception in get_nodes")
581 raise
581 raise
582
582
583 return _dirs, _files
583 return _dirs, _files
584
584
585 def get_node(self, repo_name, commit_id, file_path,
585 def get_node(self, repo_name, commit_id, file_path,
586 extended_info=False, content=False, max_file_bytes=None, cache=True):
586 extended_info=False, content=False, max_file_bytes=None, cache=True):
587 """
587 """
588 retrieve single node from commit
588 retrieve single node from commit
589 """
589 """
590 try:
590 try:
591
591
592 _repo = self._get_repo(repo_name)
592 _repo = self._get_repo(repo_name)
593 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
593 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
594
594
595 file_node = commit.get_node(file_path)
595 file_node = commit.get_node(file_path)
596 if file_node.is_dir():
596 if file_node.is_dir():
597 raise RepositoryError('The given path is a directory')
597 raise RepositoryError('The given path is a directory')
598
598
599 _content = None
599 _content = None
600 f_name = file_node.unicode_path
600 f_name = file_node.unicode_path
601
601
602 file_data = {
602 file_data = {
603 "name": h.escape(f_name),
603 "name": h.escape(f_name),
604 "type": "file",
604 "type": "file",
605 }
605 }
606
606
607 if extended_info:
607 if extended_info:
608 file_data.update({
608 file_data.update({
609 "extension": file_node.extension,
609 "extension": file_node.extension,
610 "mimetype": file_node.mimetype,
610 "mimetype": file_node.mimetype,
611 })
611 })
612
612
613 if cache:
613 if cache:
614 md5 = file_node.md5
614 md5 = file_node.md5
615 is_binary = file_node.is_binary
615 is_binary = file_node.is_binary
616 size = file_node.size
616 size = file_node.size
617 else:
617 else:
618 is_binary, md5, size, _content = file_node.metadata_uncached()
618 is_binary, md5, size, _content = file_node.metadata_uncached()
619
619
620 file_data.update({
620 file_data.update({
621 "md5": md5,
621 "md5": md5,
622 "binary": is_binary,
622 "binary": is_binary,
623 "size": size,
623 "size": size,
624 })
624 })
625
625
626 if content and cache:
626 if content and cache:
627 # get content + cache
627 # get content + cache
628 size = file_node.size
628 size = file_node.size
629 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
629 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
630 full_content = None
630 full_content = None
631 if not file_node.is_binary and not over_size_limit:
631 if not file_node.is_binary and not over_size_limit:
632 full_content = safe_unicode(file_node.content)
632 full_content = safe_unicode(file_node.content)
633
633
634 file_data.update({
634 file_data.update({
635 "content": full_content,
635 "content": full_content,
636 })
636 })
637 elif content:
637 elif content:
638 # get content *without* cache
638 # get content *without* cache
639 if _content is None:
639 if _content is None:
640 is_binary, md5, size, _content = file_node.metadata_uncached()
640 is_binary, md5, size, _content = file_node.metadata_uncached()
641
641
642 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
642 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
643 full_content = None
643 full_content = None
644 if not is_binary and not over_size_limit:
644 if not is_binary and not over_size_limit:
645 full_content = safe_unicode(_content)
645 full_content = safe_unicode(_content)
646
646
647 file_data.update({
647 file_data.update({
648 "content": full_content,
648 "content": full_content,
649 })
649 })
650
650
651 except RepositoryError:
651 except RepositoryError:
652 log.exception("Exception in get_node")
652 log.exception("Exception in get_node")
653 raise
653 raise
654
654
655 return file_data
655 return file_data
656
656
657 def get_fts_data(self, repo_name, commit_id, root_path='/'):
657 def get_fts_data(self, repo_name, commit_id, root_path='/'):
658 """
658 """
659 Fetch node tree for usage in full text search
659 Fetch node tree for usage in full text search
660 """
660 """
661
661
662 tree_info = list()
662 tree_info = list()
663
663
664 try:
664 try:
665 _repo = self._get_repo(repo_name)
665 _repo = self._get_repo(repo_name)
666 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
666 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
667 root_path = root_path.lstrip('/')
667 root_path = root_path.lstrip('/')
668 for __, dirs, files in commit.walk(root_path):
668 for __, dirs, files in commit.walk(root_path):
669
669
670 for f in files:
670 for f in files:
671 _content = None
672 _data = f_name = f.unicode_path
673 is_binary, md5, size, _content = f.metadata_uncached()
671 is_binary, md5, size, _content = f.metadata_uncached()
674 _data = {
672 _data = {
675 "name": h.escape(f_name),
673 "name": f.unicode_path,
676 "md5": md5,
674 "md5": md5,
677 "extension": f.extension,
675 "extension": f.extension,
678 "binary": is_binary,
676 "binary": is_binary,
679 "size": size
677 "size": size
680 }
678 }
681
679
682 tree_info.append(_data)
680 tree_info.append(_data)
683
681
684 except RepositoryError:
682 except RepositoryError:
685 log.exception("Exception in get_nodes")
683 log.exception("Exception in get_nodes")
686 raise
684 raise
687
685
688 return tree_info
686 return tree_info
689
687
690 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
688 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
691 author=None, trigger_push_hook=True):
689 author=None, trigger_push_hook=True):
692 """
690 """
693 Commits given multiple nodes into repo
691 Commits given multiple nodes into repo
694
692
695 :param user: RhodeCode User object or user_id, the commiter
693 :param user: RhodeCode User object or user_id, the commiter
696 :param repo: RhodeCode Repository object
694 :param repo: RhodeCode Repository object
697 :param message: commit message
695 :param message: commit message
698 :param nodes: mapping {filename:{'content':content},...}
696 :param nodes: mapping {filename:{'content':content},...}
699 :param parent_commit: parent commit, can be empty than it's
697 :param parent_commit: parent commit, can be empty than it's
700 initial commit
698 initial commit
701 :param author: author of commit, cna be different that commiter
699 :param author: author of commit, cna be different that commiter
702 only for git
700 only for git
703 :param trigger_push_hook: trigger push hooks
701 :param trigger_push_hook: trigger push hooks
704
702
705 :returns: new commited commit
703 :returns: new commited commit
706 """
704 """
707
705
708 user = self._get_user(user)
706 user = self._get_user(user)
709 scm_instance = repo.scm_instance(cache=False)
707 scm_instance = repo.scm_instance(cache=False)
710
708
711 processed_nodes = []
709 processed_nodes = []
712 for f_path in nodes:
710 for f_path in nodes:
713 f_path = self._sanitize_path(f_path)
711 f_path = self._sanitize_path(f_path)
714 content = nodes[f_path]['content']
712 content = nodes[f_path]['content']
715 f_path = safe_str(f_path)
713 f_path = safe_str(f_path)
716 # decoding here will force that we have proper encoded values
714 # decoding here will force that we have proper encoded values
717 # in any other case this will throw exceptions and deny commit
715 # in any other case this will throw exceptions and deny commit
718 if isinstance(content, (basestring,)):
716 if isinstance(content, (basestring,)):
719 content = safe_str(content)
717 content = safe_str(content)
720 elif isinstance(content, (file, cStringIO.OutputType,)):
718 elif isinstance(content, (file, cStringIO.OutputType,)):
721 content = content.read()
719 content = content.read()
722 else:
720 else:
723 raise Exception('Content is of unrecognized type %s' % (
721 raise Exception('Content is of unrecognized type %s' % (
724 type(content)
722 type(content)
725 ))
723 ))
726 processed_nodes.append((f_path, content))
724 processed_nodes.append((f_path, content))
727
725
728 message = safe_unicode(message)
726 message = safe_unicode(message)
729 commiter = user.full_contact
727 commiter = user.full_contact
730 author = safe_unicode(author) if author else commiter
728 author = safe_unicode(author) if author else commiter
731
729
732 imc = scm_instance.in_memory_commit
730 imc = scm_instance.in_memory_commit
733
731
734 if not parent_commit:
732 if not parent_commit:
735 parent_commit = EmptyCommit(alias=scm_instance.alias)
733 parent_commit = EmptyCommit(alias=scm_instance.alias)
736
734
737 if isinstance(parent_commit, EmptyCommit):
735 if isinstance(parent_commit, EmptyCommit):
738 # EmptyCommit means we we're editing empty repository
736 # EmptyCommit means we we're editing empty repository
739 parents = None
737 parents = None
740 else:
738 else:
741 parents = [parent_commit]
739 parents = [parent_commit]
742 # add multiple nodes
740 # add multiple nodes
743 for path, content in processed_nodes:
741 for path, content in processed_nodes:
744 imc.add(FileNode(path, content=content))
742 imc.add(FileNode(path, content=content))
745 # TODO: handle pre push scenario
743 # TODO: handle pre push scenario
746 tip = imc.commit(message=message,
744 tip = imc.commit(message=message,
747 author=author,
745 author=author,
748 parents=parents,
746 parents=parents,
749 branch=parent_commit.branch)
747 branch=parent_commit.branch)
750
748
751 self.mark_for_invalidation(repo.repo_name)
749 self.mark_for_invalidation(repo.repo_name)
752 if trigger_push_hook:
750 if trigger_push_hook:
753 hooks_utils.trigger_post_push_hook(
751 hooks_utils.trigger_post_push_hook(
754 username=user.username, action='push_local',
752 username=user.username, action='push_local',
755 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
753 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
756 hook_type='post_push',
754 hook_type='post_push',
757 commit_ids=[tip.raw_id])
755 commit_ids=[tip.raw_id])
758 return tip
756 return tip
759
757
760 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
758 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
761 author=None, trigger_push_hook=True):
759 author=None, trigger_push_hook=True):
762 user = self._get_user(user)
760 user = self._get_user(user)
763 scm_instance = repo.scm_instance(cache=False)
761 scm_instance = repo.scm_instance(cache=False)
764
762
765 message = safe_unicode(message)
763 message = safe_unicode(message)
766 commiter = user.full_contact
764 commiter = user.full_contact
767 author = safe_unicode(author) if author else commiter
765 author = safe_unicode(author) if author else commiter
768
766
769 imc = scm_instance.in_memory_commit
767 imc = scm_instance.in_memory_commit
770
768
771 if not parent_commit:
769 if not parent_commit:
772 parent_commit = EmptyCommit(alias=scm_instance.alias)
770 parent_commit = EmptyCommit(alias=scm_instance.alias)
773
771
774 if isinstance(parent_commit, EmptyCommit):
772 if isinstance(parent_commit, EmptyCommit):
775 # EmptyCommit means we we're editing empty repository
773 # EmptyCommit means we we're editing empty repository
776 parents = None
774 parents = None
777 else:
775 else:
778 parents = [parent_commit]
776 parents = [parent_commit]
779
777
780 # add multiple nodes
778 # add multiple nodes
781 for _filename, data in nodes.items():
779 for _filename, data in nodes.items():
782 # new filename, can be renamed from the old one, also sanitaze
780 # new filename, can be renamed from the old one, also sanitaze
783 # the path for any hack around relative paths like ../../ etc.
781 # the path for any hack around relative paths like ../../ etc.
784 filename = self._sanitize_path(data['filename'])
782 filename = self._sanitize_path(data['filename'])
785 old_filename = self._sanitize_path(_filename)
783 old_filename = self._sanitize_path(_filename)
786 content = data['content']
784 content = data['content']
787 file_mode = data.get('mode')
785 file_mode = data.get('mode')
788 filenode = FileNode(old_filename, content=content, mode=file_mode)
786 filenode = FileNode(old_filename, content=content, mode=file_mode)
789 op = data['op']
787 op = data['op']
790 if op == 'add':
788 if op == 'add':
791 imc.add(filenode)
789 imc.add(filenode)
792 elif op == 'del':
790 elif op == 'del':
793 imc.remove(filenode)
791 imc.remove(filenode)
794 elif op == 'mod':
792 elif op == 'mod':
795 if filename != old_filename:
793 if filename != old_filename:
796 # TODO: handle renames more efficient, needs vcs lib changes
794 # TODO: handle renames more efficient, needs vcs lib changes
797 imc.remove(filenode)
795 imc.remove(filenode)
798 imc.add(FileNode(filename, content=content, mode=file_mode))
796 imc.add(FileNode(filename, content=content, mode=file_mode))
799 else:
797 else:
800 imc.change(filenode)
798 imc.change(filenode)
801
799
802 try:
800 try:
803 # TODO: handle pre push scenario commit changes
801 # TODO: handle pre push scenario commit changes
804 tip = imc.commit(message=message,
802 tip = imc.commit(message=message,
805 author=author,
803 author=author,
806 parents=parents,
804 parents=parents,
807 branch=parent_commit.branch)
805 branch=parent_commit.branch)
808 except NodeNotChangedError:
806 except NodeNotChangedError:
809 raise
807 raise
810 except Exception as e:
808 except Exception as e:
811 log.exception("Unexpected exception during call to imc.commit")
809 log.exception("Unexpected exception during call to imc.commit")
812 raise IMCCommitError(str(e))
810 raise IMCCommitError(str(e))
813 finally:
811 finally:
814 # always clear caches, if commit fails we want fresh object also
812 # always clear caches, if commit fails we want fresh object also
815 self.mark_for_invalidation(repo.repo_name)
813 self.mark_for_invalidation(repo.repo_name)
816
814
817 if trigger_push_hook:
815 if trigger_push_hook:
818 hooks_utils.trigger_post_push_hook(
816 hooks_utils.trigger_post_push_hook(
819 username=user.username, action='push_local', hook_type='post_push',
817 username=user.username, action='push_local', hook_type='post_push',
820 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
818 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
821 commit_ids=[tip.raw_id])
819 commit_ids=[tip.raw_id])
822
820
823 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
821 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
824 author=None, trigger_push_hook=True):
822 author=None, trigger_push_hook=True):
825 """
823 """
826 Deletes given multiple nodes into `repo`
824 Deletes given multiple nodes into `repo`
827
825
828 :param user: RhodeCode User object or user_id, the committer
826 :param user: RhodeCode User object or user_id, the committer
829 :param repo: RhodeCode Repository object
827 :param repo: RhodeCode Repository object
830 :param message: commit message
828 :param message: commit message
831 :param nodes: mapping {filename:{'content':content},...}
829 :param nodes: mapping {filename:{'content':content},...}
832 :param parent_commit: parent commit, can be empty than it's initial
830 :param parent_commit: parent commit, can be empty than it's initial
833 commit
831 commit
834 :param author: author of commit, cna be different that commiter only
832 :param author: author of commit, cna be different that commiter only
835 for git
833 for git
836 :param trigger_push_hook: trigger push hooks
834 :param trigger_push_hook: trigger push hooks
837
835
838 :returns: new commit after deletion
836 :returns: new commit after deletion
839 """
837 """
840
838
841 user = self._get_user(user)
839 user = self._get_user(user)
842 scm_instance = repo.scm_instance(cache=False)
840 scm_instance = repo.scm_instance(cache=False)
843
841
844 processed_nodes = []
842 processed_nodes = []
845 for f_path in nodes:
843 for f_path in nodes:
846 f_path = self._sanitize_path(f_path)
844 f_path = self._sanitize_path(f_path)
847 # content can be empty but for compatabilty it allows same dicts
845 # content can be empty but for compatabilty it allows same dicts
848 # structure as add_nodes
846 # structure as add_nodes
849 content = nodes[f_path].get('content')
847 content = nodes[f_path].get('content')
850 processed_nodes.append((f_path, content))
848 processed_nodes.append((f_path, content))
851
849
852 message = safe_unicode(message)
850 message = safe_unicode(message)
853 commiter = user.full_contact
851 commiter = user.full_contact
854 author = safe_unicode(author) if author else commiter
852 author = safe_unicode(author) if author else commiter
855
853
856 imc = scm_instance.in_memory_commit
854 imc = scm_instance.in_memory_commit
857
855
858 if not parent_commit:
856 if not parent_commit:
859 parent_commit = EmptyCommit(alias=scm_instance.alias)
857 parent_commit = EmptyCommit(alias=scm_instance.alias)
860
858
861 if isinstance(parent_commit, EmptyCommit):
859 if isinstance(parent_commit, EmptyCommit):
862 # EmptyCommit means we we're editing empty repository
860 # EmptyCommit means we we're editing empty repository
863 parents = None
861 parents = None
864 else:
862 else:
865 parents = [parent_commit]
863 parents = [parent_commit]
866 # add multiple nodes
864 # add multiple nodes
867 for path, content in processed_nodes:
865 for path, content in processed_nodes:
868 imc.remove(FileNode(path, content=content))
866 imc.remove(FileNode(path, content=content))
869
867
870 # TODO: handle pre push scenario
868 # TODO: handle pre push scenario
871 tip = imc.commit(message=message,
869 tip = imc.commit(message=message,
872 author=author,
870 author=author,
873 parents=parents,
871 parents=parents,
874 branch=parent_commit.branch)
872 branch=parent_commit.branch)
875
873
876 self.mark_for_invalidation(repo.repo_name)
874 self.mark_for_invalidation(repo.repo_name)
877 if trigger_push_hook:
875 if trigger_push_hook:
878 hooks_utils.trigger_post_push_hook(
876 hooks_utils.trigger_post_push_hook(
879 username=user.username, action='push_local', hook_type='post_push',
877 username=user.username, action='push_local', hook_type='post_push',
880 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
878 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
881 commit_ids=[tip.raw_id])
879 commit_ids=[tip.raw_id])
882 return tip
880 return tip
883
881
884 def strip(self, repo, commit_id, branch):
882 def strip(self, repo, commit_id, branch):
885 scm_instance = repo.scm_instance(cache=False)
883 scm_instance = repo.scm_instance(cache=False)
886 scm_instance.config.clear_section('hooks')
884 scm_instance.config.clear_section('hooks')
887 scm_instance.strip(commit_id, branch)
885 scm_instance.strip(commit_id, branch)
888 self.mark_for_invalidation(repo.repo_name)
886 self.mark_for_invalidation(repo.repo_name)
889
887
890 def get_unread_journal(self):
888 def get_unread_journal(self):
891 return self.sa.query(UserLog).count()
889 return self.sa.query(UserLog).count()
892
890
893 def get_repo_landing_revs(self, translator, repo=None):
891 def get_repo_landing_revs(self, translator, repo=None):
894 """
892 """
895 Generates select option with tags branches and bookmarks (for hg only)
893 Generates select option with tags branches and bookmarks (for hg only)
896 grouped by type
894 grouped by type
897
895
898 :param repo:
896 :param repo:
899 """
897 """
900 _ = translator
898 _ = translator
901 repo = self._get_repo(repo)
899 repo = self._get_repo(repo)
902
900
903 hist_l = [
901 hist_l = [
904 ['rev:tip', _('latest tip')]
902 ['rev:tip', _('latest tip')]
905 ]
903 ]
906 choices = [
904 choices = [
907 'rev:tip'
905 'rev:tip'
908 ]
906 ]
909
907
910 if not repo:
908 if not repo:
911 return choices, hist_l
909 return choices, hist_l
912
910
913 repo = repo.scm_instance()
911 repo = repo.scm_instance()
914
912
915 branches_group = (
913 branches_group = (
916 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
914 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
917 for b in repo.branches],
915 for b in repo.branches],
918 _("Branches"))
916 _("Branches"))
919 hist_l.append(branches_group)
917 hist_l.append(branches_group)
920 choices.extend([x[0] for x in branches_group[0]])
918 choices.extend([x[0] for x in branches_group[0]])
921
919
922 if repo.alias == 'hg':
920 if repo.alias == 'hg':
923 bookmarks_group = (
921 bookmarks_group = (
924 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
922 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
925 for b in repo.bookmarks],
923 for b in repo.bookmarks],
926 _("Bookmarks"))
924 _("Bookmarks"))
927 hist_l.append(bookmarks_group)
925 hist_l.append(bookmarks_group)
928 choices.extend([x[0] for x in bookmarks_group[0]])
926 choices.extend([x[0] for x in bookmarks_group[0]])
929
927
930 tags_group = (
928 tags_group = (
931 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
929 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
932 for t in repo.tags],
930 for t in repo.tags],
933 _("Tags"))
931 _("Tags"))
934 hist_l.append(tags_group)
932 hist_l.append(tags_group)
935 choices.extend([x[0] for x in tags_group[0]])
933 choices.extend([x[0] for x in tags_group[0]])
936
934
937 return choices, hist_l
935 return choices, hist_l
938
936
939 def get_server_info(self, environ=None):
937 def get_server_info(self, environ=None):
940 server_info = get_system_info(environ)
938 server_info = get_system_info(environ)
941 return server_info
939 return server_info
General Comments 0
You need to be logged in to leave comments. Login now