# -*- coding: utf-8 -*-

# Copyright (C) 2010-2018 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Scm model for RhodeCode
"""

import os.path
import traceback
import logging
import cStringIO

from sqlalchemy import func
from zope.cachedescriptors.property import Lazy as LazyProperty

import rhodecode
from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib import helpers as h, rc_cache
from rhodecode.lib.auth import (
    HasRepoPermissionAny, HasRepoGroupPermissionAny,
    HasUserGroupPermissionAny)
from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
from rhodecode.lib import hooks_utils
from rhodecode.lib.utils import (
    get_filesystem_repos, make_db_config)
from rhodecode.lib.utils2 import (safe_str, safe_unicode)
from rhodecode.lib.system_info import get_system_info
from rhodecode.model import BaseModel
from rhodecode.model.db import (
    Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
    PullRequest)
from rhodecode.model.settings import VcsSettingsModel
from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl

log = logging.getLogger(__name__)


class UserTemp(object):
    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)


class RepoTemp(object):
    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)


class SimpleCachedRepoList(object):
    """
    Lighter version of iteration over repos, without the scm initialisation
    and with cache usage.
    """
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')
        if not perm_set:
            perm_set = ['repository.read', 'repository.write',
                        'repository.admin']
        self.perm_set = perm_set

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbr in self.db_repo_list:
            # check permission at this level
            has_perm = HasRepoPermissionAny(*self.perm_set)(
                dbr.repo_name, 'SimpleCachedRepoList check')
            if not has_perm:
                continue

            tmp_d = {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
            }
            yield tmp_d
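

# Usage sketch (hypothetical, not part of the original module): iterating a
# SimpleCachedRepoList yields permission-filtered dicts; entries the current
# user cannot read are silently skipped.
#
#   repos = Repository.query().all()
#   for entry in SimpleCachedRepoList(repos, repos_path='/srv/repos'):
#       print entry['name']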


class _PermCheckIterator(object):

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Creates an iterator from the given list of objects, additionally
        checking permissions for them against the given perm_set

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        checker = self.perm_checker(*self.perm_set)
        for db_obj in self.obj_list:
            # check permission at this level
            name = getattr(db_obj, self.obj_attr, None)
            if not checker(name, self.__class__.__name__, **self.extra_kwargs):
                continue

            yield db_obj


class RepoList(_PermCheckIterator):

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = [
                'repository.read', 'repository.write', 'repository.admin']

        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)


class RepoGroupList(_PermCheckIterator):

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['group.read', 'group.write', 'group.admin']

        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)


class UserGroupList(_PermCheckIterator):

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']

        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
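

# Usage sketch (hypothetical): restricting an iteration to repositories the
# current user administers, by narrowing the default permission set:
#
#   admin_only = RepoList(Repository.query().all(),
#                         perm_set=['repository.admin'])
#   names = [r.repo_name for r in admin_only]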


class ScmModel(BaseModel):
    """
    Generic Scm Model
    """

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from the database
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()

    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in the given path. This path should not be a
        repository itself. Returns a dictionary of repository objects.

        :param repos_path: path to directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name needs to be decomposed and put back together using the /
            # since this is the internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    klass = get_backend(path[0])
                    repos[name] = klass(path[1], config=config)
            except OSError:
                continue
        log.debug('found %s paths with repositories', len(repos))
        return repos
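
    # Usage sketch (hypothetical path): scan a storage directory and get a
    # mapping of normalized repo names to vcs backend instances:
    #
    #   found = ScmModel().repo_scan('/srv/repos')
    #   # -> {'group1/repo1': <backend instance>, ...}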

    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db, and for each repo create its
        backend instance and fill that backend with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repositories
        """
        if all_repos is None:
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter

    def get_repo_groups(self, all_groups=None):
        if all_groups is None:
            all_groups = RepoGroup.query()\
                .filter(RepoGroup.group_parent_id == None).all()
        return [x for x in RepoGroupList(all_groups)]

    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. The `delete` flag
        removes the cache entries instead.

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting a bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
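
    # Usage sketch: after any operation that changes repository content, the
    # model invalidates that repo's caches, e.g.
    #
    #   ScmModel().mark_for_invalidation('group1/repo1')               # flag stale
    #   ScmModel().mark_for_invalidation('group1/repo1', delete=True)  # purge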

    def toggle_following_repo(self, follow_repo_id, user_id):

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def toggle_following_user(self, follow_user_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def get_followers(self, repo):
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()

    def get_forks(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()

    def get_pull_requests(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.target_repo == repo)\
            .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()

    def mark_as_fork(self, repo, fork, user):
        repo = self._get_repo(repo)
        fork = self._get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")

        if fork and repo.repo_type != fork.repo_type:
            raise RepositoryError(
                "Cannot set repository as fork of repository with other type")

        repo.fork = fork
        self.sa.add(repo)
        return repo

    def pull_changes(self, repo, username, remote_uri=None):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls.
            # This is needed because these tasks can be executed via the
            # scheduler without proper validation of remote_uri
            config = make_db_config(clear_session=False)
            url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.pull(remote_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def push_changes(self, repo, username, remote_uri=None):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.push_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls.
            # This is needed because these tasks can be executed via the
            # scheduler without proper validation of remote_uri
            config = make_db_config(clear_session=False)
            url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        try:
            repo.push(remote_uri)
        except Exception:
            log.error(traceback.format_exc())
            raise
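
    # Note on the validation above (a sketch of the expected behaviour, based
    # on the code in this module): url_validator raises InvalidCloneUrl for a
    # malformed or unsupported remote uri, so both pull_changes and
    # push_changes abort before touching the remote, e.g.
    #
    #   ScmModel().pull_changes('group1/repo1', 'admin',
    #                           remote_uri='https://host/repo')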

    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits a change to a single file.

        :param repo: SCM instance

        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author need to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want a fresh object too
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', repo_name=repo_name,
            repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip
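
    # Usage sketch (hypothetical values): update one file in place, using the
    # current tip as the parent commit:
    #
    #   tip = repo.scm_instance().get_commit()
    #   new_tip = ScmModel().commit_change(
    #       repo.scm_instance(), repo.repo_name, tip, user,
    #       author=u'Joe <joe@example.com>', message=u'fix typo',
    #       content='new file content\n', f_path='docs/README.rst')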

    def _sanitize_path(self, f_path):
        if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
            raise NonRelativePathError('%s is not a relative path' % f_path)
        if f_path:
            f_path = os.path.normpath(f_path)
        return f_path
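
    # Behaviour sketch of _sanitize_path, derived from the checks above:
    #
    #   'docs/index.rst'  -> 'docs/index.rst'     (normalized, accepted)
    #   '/etc/passwd'     -> NonRelativePathError (absolute path)
    #   'a/../../secret'  -> NonRelativePathError (path traversal)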

    def get_dirnode_metadata(self, request, commit, dir_node):
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # we skip non-file nodes
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data

    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        Recursive walk in the root dir, returning a set of all paths found
        there, based on the repository walk function.

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list of paths; if False, returns a dict with
            a description per node
        :param max_file_bytes: will not return file contents over this limit

        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):
                for f in files:
                    _content = None
                    _data = f.unicode_path
                    over_size_limit = (max_file_bytes is not None
                                       and f.size > max_file_bytes)

                    if not flat:
                        _data = {
                            "name": h.escape(f.unicode_path),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)
                for d in dirs:
                    _data = d.unicode_path
                    if not flat:
                        _data = {
                            "name": h.escape(d.unicode_path),
                            "type": "dir",
                        }
                        if extended_info:
                            _data.update({
                                "md5": None,
                                "binary": None,
                                "size": None,
                                "extension": None,
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.debug("Exception in get_nodes", exc_info=True)
            raise

        return _dirs, _files
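
    # Usage sketch (hypothetical repo and commit id): list directories and
    # files at a given commit as plain unicode paths:
    #
    #   dirs, files = ScmModel().get_nodes('group1/repo1', 'tip')
    #   # with flat=False each entry becomes a dict instead, e.g.
    #   # {'name': 'docs/index.rst', 'type': 'file', ...}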

    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty; then it's
            the initial commit
        :param author: author of commit, can be different than the committer,
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        committer = user.full_contact
        author = safe_unicode(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
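
    # Usage sketch (hypothetical content): add two new files in one commit;
    # parent_commit defaults to an EmptyCommit for an empty repository:
    #
    #   tip = ScmModel().create_nodes(
    #       user, repo, message=u'add docs',
    #       nodes={
    #           'docs/index.rst': {'content': 'Index\n'},
    #           'docs/api.rst': {'content': 'API\n'},
    #       },
    #       parent_commit=repo.scm_instance().get_commit())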

    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        committer = user.full_contact
        author = safe_unicode(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one; also sanitize
            # the path against any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']

            filenode = FileNode(old_filename, content=content)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficiently, needs vcs lib
                    # changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario
            # commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want a fresh object too
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
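
    # Usage sketch (hypothetical mapping): the `nodes` dict keys are the old
    # filenames; `op` is one of 'add', 'del' or 'mod', and 'mod' with a new
    # filename performs a remove-and-add rename:
    #
    #   nodes = {
    #       'README.md': {'filename': 'README.rst',
    #                     'content': 'Readme\n', 'op': 'mod'},
    #       'old.txt': {'filename': 'old.txt', 'content': '', 'op': 'del'},
    #   }
    #   ScmModel().update_nodes(user, repo, u'rename readme', nodes,
    #                           parent_commit=repo.scm_instance().get_commit())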

    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes from `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty; then it's the
            initial commit
        :param author: author of commit, can be different than the committer,
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty, but for compatibility it allows the same
            # dict structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        committer = user.full_contact
        author = safe_unicode(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # remove multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip

    def strip(self, repo, commit_id, branch):
        scm_instance = repo.scm_instance(cache=False)
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select options with tags, branches and bookmarks (the
        latter for hg only), grouped by type

        :param repo:
        """
        _ = translator
        repo = self._get_repo(repo)

        hist_l = [
            ['rev:tip', _('latest tip')]
        ]
        choices = [
            'rev:tip'
        ]

        if not repo:
            return choices, hist_l

        repo = repo.scm_instance()

        branches_group = (
            [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
             for b in repo.branches],
            _("Branches"))
        hist_l.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        if repo.alias == 'hg':
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            hist_l.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        hist_l.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, hist_l
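
    # Shape sketch of the return value, derived from the code above
    # (hypothetical refs):
    #
    #   choices -> ['rev:tip', u'branch:default', u'tag:v1.0', ...]
    #   hist_l  -> [['rev:tip', 'latest tip'],
    #               ([(u'branch:default', u'default')], 'Branches'),
    #               ([(u'tag:v1.0', u'v1.0')], 'Tags')]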

    def get_server_info(self, environ=None):
        server_info = get_system_info(environ)
        return server_info