Show More
@@ -1,248 +1,263 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | |
|
23 | 23 | from pyramid.view import view_config |
|
24 | 24 | from pyramid.httpexceptions import HTTPFound |
|
25 | 25 | |
|
26 | 26 | from rhodecode.apps._base import RepoAppView |
|
27 | 27 | from rhodecode.lib import helpers as h |
|
28 | 28 | from rhodecode.lib import audit_logger |
|
29 | 29 | from rhodecode.lib.auth import ( |
|
30 | 30 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired, |
|
31 | 31 | HasRepoPermissionAny) |
|
32 | from rhodecode.lib.exceptions import AttachedForksError | |
|
32 | from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError | |
|
33 | 33 | from rhodecode.lib.utils2 import safe_int |
|
34 | 34 | from rhodecode.lib.vcs import RepositoryError |
|
35 | 35 | from rhodecode.model.db import Session, UserFollowing, User, Repository |
|
36 | 36 | from rhodecode.model.repo import RepoModel |
|
37 | 37 | from rhodecode.model.scm import ScmModel |
|
38 | 38 | |
|
39 | 39 | log = logging.getLogger(__name__) |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | class RepoSettingsView(RepoAppView): |
|
43 | 43 | |
|
44 | 44 | def load_default_context(self): |
|
45 | 45 | c = self._get_local_tmpl_context() |
|
46 | 46 | return c |
|
47 | 47 | |
|
48 | 48 | @LoginRequired() |
|
49 | 49 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
50 | 50 | @view_config( |
|
51 | 51 | route_name='edit_repo_advanced', request_method='GET', |
|
52 | 52 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
53 | 53 | def edit_advanced(self): |
|
54 | 54 | c = self.load_default_context() |
|
55 | 55 | c.active = 'advanced' |
|
56 | 56 | |
|
57 | 57 | c.default_user_id = User.get_default_user().user_id |
|
58 | 58 | c.in_public_journal = UserFollowing.query() \ |
|
59 | 59 | .filter(UserFollowing.user_id == c.default_user_id) \ |
|
60 | 60 | .filter(UserFollowing.follows_repository == self.db_repo).scalar() |
|
61 | 61 | |
|
62 | 62 | c.has_origin_repo_read_perm = False |
|
63 | 63 | if self.db_repo.fork: |
|
64 | 64 | c.has_origin_repo_read_perm = h.HasRepoPermissionAny( |
|
65 | 65 | 'repository.write', 'repository.read', 'repository.admin')( |
|
66 | 66 | self.db_repo.fork.repo_name, 'repo set as fork page') |
|
67 | 67 | |
|
68 | 68 | return self._get_template_context(c) |
|
69 | 69 | |
|
70 | 70 | @LoginRequired() |
|
71 | 71 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
72 | 72 | @CSRFRequired() |
|
73 | 73 | @view_config( |
|
74 | 74 | route_name='edit_repo_advanced_delete', request_method='POST', |
|
75 | 75 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
76 | 76 | def edit_advanced_delete(self): |
|
77 | 77 | """ |
|
78 | 78 | Deletes the repository, or shows warnings if deletion is not possible |
|
79 | 79 | because of attached forks or other errors. |
|
80 | 80 | """ |
|
81 | 81 | _ = self.request.translate |
|
82 | 82 | handle_forks = self.request.POST.get('forks', None) |
|
83 | if handle_forks == 'detach_forks': | |
|
84 | handle_forks = 'detach' | |
|
85 | elif handle_forks == 'delete_forks': | |
|
86 | handle_forks = 'delete' | |
|
83 | 87 | |
|
84 | 88 | try: |
|
89 | old_data = self.db_repo.get_api_data() | |
|
90 | RepoModel().delete(self.db_repo, forks=handle_forks) | |
|
91 | ||
|
85 | 92 | _forks = self.db_repo.forks.count() |
|
86 | 93 | if _forks and handle_forks: |
|
87 | 94 | if handle_forks == 'detach_forks': |
|
88 | handle_forks = 'detach' | |
|
89 | 95 | h.flash(_('Detached %s forks') % _forks, category='success') |
|
90 | 96 | elif handle_forks == 'delete_forks': |
|
91 | handle_forks = 'delete' | |
|
92 | 97 | h.flash(_('Deleted %s forks') % _forks, category='success') |
|
93 | 98 | |
|
94 | old_data = self.db_repo.get_api_data() | |
|
95 | RepoModel().delete(self.db_repo, forks=handle_forks) | |
|
96 | ||
|
97 | repo = audit_logger.RepoWrap(repo_id=None, | |
|
98 | repo_name=self.db_repo.repo_name) | |
|
99 | repo = audit_logger.RepoWrap(repo_id=None, repo_name=self.db_repo.repo_name) | |
|
99 | 100 | audit_logger.store_web( |
|
100 | 101 | 'repo.delete', action_data={'old_data': old_data}, |
|
101 | 102 | user=self._rhodecode_user, repo=repo) |
|
102 | 103 | |
|
103 | 104 | ScmModel().mark_for_invalidation(self.db_repo_name, delete=True) |
|
104 | 105 | h.flash( |
|
105 | 106 | _('Deleted repository `%s`') % self.db_repo_name, |
|
106 | 107 | category='success') |
|
107 | 108 | Session().commit() |
|
108 | 109 | except AttachedForksError: |
|
109 | 110 | repo_advanced_url = h.route_path( |
|
110 | 111 | 'edit_repo_advanced', repo_name=self.db_repo_name, |
|
111 | 112 | _anchor='advanced-delete') |
|
112 | 113 | delete_anchor = h.link_to(_('detach or delete'), repo_advanced_url) |
|
113 | 114 | h.flash(_('Cannot delete `{repo}` it still contains attached forks. ' |
|
114 | 115 | 'Try using {delete_or_detach} option.') |
|
115 | 116 | .format(repo=self.db_repo_name, delete_or_detach=delete_anchor), |
|
116 | 117 | category='warning') |
|
117 | 118 | |
|
118 | 119 | # redirect to advanced for forks handle action ? |
|
119 | 120 | raise HTTPFound(repo_advanced_url) |
|
120 | 121 | |
|
122 | except AttachedPullRequestsError: | |
|
123 | repo_advanced_url = h.route_path( | |
|
124 | 'edit_repo_advanced', repo_name=self.db_repo_name, | |
|
125 | _anchor='advanced-delete') | |
|
126 | attached_prs = len(self.db_repo.pull_requests_source + | |
|
127 | self.db_repo.pull_requests_target) | |
|
128 | h.flash( | |
|
129 | _('Cannot delete `{repo}` it still contains {num} attached pull requests. ' | |
|
130 | 'Consider archiving the repository instead.').format( | |
|
131 | repo=self.db_repo_name, num=attached_prs), category='warning') | |
|
132 | ||
|
133 | # redirect to advanced for forks handle action ? | |
|
134 | raise HTTPFound(repo_advanced_url) | |
|
135 | ||
|
121 | 136 | except Exception: |
|
122 | 137 | log.exception("Exception during deletion of repository") |
|
123 | 138 | h.flash(_('An error occurred during deletion of `%s`') |
|
124 | 139 | % self.db_repo_name, category='error') |
|
125 | 140 | # redirect to advanced for more deletion options |
|
126 | 141 | raise HTTPFound( |
|
127 | 142 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name, |
|
128 | 143 | _anchor='advanced-delete')) |
|
129 | 144 | |
|
130 | 145 | raise HTTPFound(h.route_path('home')) |
|
131 | 146 | |
|
132 | 147 | @LoginRequired() |
|
133 | 148 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
134 | 149 | @CSRFRequired() |
|
135 | 150 | @view_config( |
|
136 | 151 | route_name='edit_repo_advanced_journal', request_method='POST', |
|
137 | 152 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
138 | 153 | def edit_advanced_journal(self): |
|
139 | 154 | """ |
|
140 | 155 | Set's this repository to be visible in public journal, |
|
141 | 156 | in other words making default user to follow this repo |
|
142 | 157 | """ |
|
143 | 158 | _ = self.request.translate |
|
144 | 159 | |
|
145 | 160 | try: |
|
146 | 161 | user_id = User.get_default_user().user_id |
|
147 | 162 | ScmModel().toggle_following_repo(self.db_repo.repo_id, user_id) |
|
148 | 163 | h.flash(_('Updated repository visibility in public journal'), |
|
149 | 164 | category='success') |
|
150 | 165 | Session().commit() |
|
151 | 166 | except Exception: |
|
152 | 167 | h.flash(_('An error occurred during setting this ' |
|
153 | 168 | 'repository in public journal'), |
|
154 | 169 | category='error') |
|
155 | 170 | |
|
156 | 171 | raise HTTPFound( |
|
157 | 172 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name)) |
|
158 | 173 | |
|
159 | 174 | @LoginRequired() |
|
160 | 175 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
161 | 176 | @CSRFRequired() |
|
162 | 177 | @view_config( |
|
163 | 178 | route_name='edit_repo_advanced_fork', request_method='POST', |
|
164 | 179 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
165 | 180 | def edit_advanced_fork(self): |
|
166 | 181 | """ |
|
167 | 182 | Mark given repository as a fork of another |
|
168 | 183 | """ |
|
169 | 184 | _ = self.request.translate |
|
170 | 185 | |
|
171 | 186 | new_fork_id = safe_int(self.request.POST.get('id_fork_of')) |
|
172 | 187 | |
|
173 | 188 | # valid repo, re-check permissions |
|
174 | 189 | if new_fork_id: |
|
175 | 190 | repo = Repository.get(new_fork_id) |
|
176 | 191 | # ensure we have at least read access to the repo we mark |
|
177 | 192 | perm_check = HasRepoPermissionAny( |
|
178 | 193 | 'repository.read', 'repository.write', 'repository.admin') |
|
179 | 194 | |
|
180 | 195 | if repo and perm_check(repo_name=repo.repo_name): |
|
181 | 196 | new_fork_id = repo.repo_id |
|
182 | 197 | else: |
|
183 | 198 | new_fork_id = None |
|
184 | 199 | |
|
185 | 200 | try: |
|
186 | 201 | repo = ScmModel().mark_as_fork( |
|
187 | 202 | self.db_repo_name, new_fork_id, self._rhodecode_user.user_id) |
|
188 | 203 | fork = repo.fork.repo_name if repo.fork else _('Nothing') |
|
189 | 204 | Session().commit() |
|
190 | 205 | h.flash( |
|
191 | 206 | _('Marked repo %s as fork of %s') % (self.db_repo_name, fork), |
|
192 | 207 | category='success') |
|
193 | 208 | except RepositoryError as e: |
|
194 | 209 | log.exception("Repository Error occurred") |
|
195 | 210 | h.flash(str(e), category='error') |
|
196 | 211 | except Exception: |
|
197 | 212 | log.exception("Exception while editing fork") |
|
198 | 213 | h.flash(_('An error occurred during this operation'), |
|
199 | 214 | category='error') |
|
200 | 215 | |
|
201 | 216 | raise HTTPFound( |
|
202 | 217 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name)) |
|
203 | 218 | |
|
204 | 219 | @LoginRequired() |
|
205 | 220 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
206 | 221 | @CSRFRequired() |
|
207 | 222 | @view_config( |
|
208 | 223 | route_name='edit_repo_advanced_locking', request_method='POST', |
|
209 | 224 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
210 | 225 | def edit_advanced_locking(self): |
|
211 | 226 | """ |
|
212 | 227 | Toggle locking of repository |
|
213 | 228 | """ |
|
214 | 229 | _ = self.request.translate |
|
215 | 230 | set_lock = self.request.POST.get('set_lock') |
|
216 | 231 | set_unlock = self.request.POST.get('set_unlock') |
|
217 | 232 | |
|
218 | 233 | try: |
|
219 | 234 | if set_lock: |
|
220 | 235 | Repository.lock(self.db_repo, self._rhodecode_user.user_id, |
|
221 | 236 | lock_reason=Repository.LOCK_WEB) |
|
222 | 237 | h.flash(_('Locked repository'), category='success') |
|
223 | 238 | elif set_unlock: |
|
224 | 239 | Repository.unlock(self.db_repo) |
|
225 | 240 | h.flash(_('Unlocked repository'), category='success') |
|
226 | 241 | except Exception as e: |
|
227 | 242 | log.exception("Exception during unlocking") |
|
228 | 243 | h.flash(_('An error occurred during unlocking'), category='error') |
|
229 | 244 | |
|
230 | 245 | raise HTTPFound( |
|
231 | 246 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name)) |
|
232 | 247 | |
|
233 | 248 | @LoginRequired() |
|
234 | 249 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
235 | 250 | @view_config( |
|
236 | 251 | route_name='edit_repo_advanced_hooks', request_method='GET', |
|
237 | 252 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
238 | 253 | def edit_advanced_install_hooks(self): |
|
239 | 254 | """ |
|
240 | 255 | Install Hooks for repository |
|
241 | 256 | """ |
|
242 | 257 | _ = self.request.translate |
|
243 | 258 | self.load_default_context() |
|
244 | 259 | self.rhodecode_vcs_repo.install_hooks(force=True) |
|
245 | 260 | h.flash(_('installed updated hooks into this repository'), |
|
246 | 261 | category='success') |
|
247 | 262 | raise HTTPFound( |
|
248 | 263 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name)) |
@@ -1,155 +1,159 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Set of custom exceptions used in RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | from webob.exc import HTTPClientError |
|
26 | 26 | from pyramid.httpexceptions import HTTPBadGateway |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | class LdapUsernameError(Exception): |
|
30 | 30 | pass |
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | class LdapPasswordError(Exception): |
|
34 | 34 | pass |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | class LdapConnectionError(Exception): |
|
38 | 38 | pass |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | class LdapImportError(Exception): |
|
42 | 42 | pass |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | class DefaultUserException(Exception): |
|
46 | 46 | pass |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class UserOwnsReposException(Exception): |
|
50 | 50 | pass |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class UserOwnsRepoGroupsException(Exception): |
|
54 | 54 | pass |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | class UserOwnsUserGroupsException(Exception): |
|
58 | 58 | pass |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | class UserGroupAssignedException(Exception): |
|
62 | 62 | pass |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | class StatusChangeOnClosedPullRequestError(Exception): |
|
66 | 66 | pass |
|
67 | 67 | |
|
68 | 68 | |
|
69 | 69 | class AttachedForksError(Exception): |
|
70 | 70 | pass |
|
71 | 71 | |
|
72 | 72 | |
|
73 | class AttachedPullRequestsError(Exception): | |
|
74 | pass | |
|
75 | ||
|
76 | ||
|
73 | 77 | class RepoGroupAssignmentError(Exception): |
|
74 | 78 | pass |
|
75 | 79 | |
|
76 | 80 | |
|
77 | 81 | class NonRelativePathError(Exception): |
|
78 | 82 | pass |
|
79 | 83 | |
|
80 | 84 | |
|
81 | 85 | class HTTPRequirementError(HTTPClientError): |
|
82 | 86 | title = explanation = 'Repository Requirement Missing' |
|
83 | 87 | reason = None |
|
84 | 88 | |
|
85 | 89 | def __init__(self, message, *args, **kwargs): |
|
86 | 90 | self.title = self.explanation = message |
|
87 | 91 | super(HTTPRequirementError, self).__init__(*args, **kwargs) |
|
88 | 92 | self.args = (message, ) |
|
89 | 93 | |
|
90 | 94 | |
|
91 | 95 | class HTTPLockedRC(HTTPClientError): |
|
92 | 96 | """ |
|
93 | 97 | Special Exception For locked Repos in RhodeCode, the return code can |
|
94 | 98 | be overwritten by _code keyword argument passed into constructors |
|
95 | 99 | """ |
|
96 | 100 | code = 423 |
|
97 | 101 | title = explanation = 'Repository Locked' |
|
98 | 102 | reason = None |
|
99 | 103 | |
|
100 | 104 | def __init__(self, message, *args, **kwargs): |
|
101 | 105 | from rhodecode import CONFIG |
|
102 | 106 | from rhodecode.lib.utils2 import safe_int |
|
103 | 107 | _code = CONFIG.get('lock_ret_code') |
|
104 | 108 | self.code = safe_int(_code, self.code) |
|
105 | 109 | self.title = self.explanation = message |
|
106 | 110 | super(HTTPLockedRC, self).__init__(*args, **kwargs) |
|
107 | 111 | self.args = (message, ) |
|
108 | 112 | |
|
109 | 113 | |
|
110 | 114 | class HTTPBranchProtected(HTTPClientError): |
|
111 | 115 | """ |
|
112 | 116 | Special Exception For Indicating that branch is protected in RhodeCode, the |
|
113 | 117 | return code can be overwritten by _code keyword argument passed into constructors |
|
114 | 118 | """ |
|
115 | 119 | code = 403 |
|
116 | 120 | title = explanation = 'Branch Protected' |
|
117 | 121 | reason = None |
|
118 | 122 | |
|
119 | 123 | def __init__(self, message, *args, **kwargs): |
|
120 | 124 | self.title = self.explanation = message |
|
121 | 125 | super(HTTPBranchProtected, self).__init__(*args, **kwargs) |
|
122 | 126 | self.args = (message, ) |
|
123 | 127 | |
|
124 | 128 | |
|
125 | 129 | class IMCCommitError(Exception): |
|
126 | 130 | pass |
|
127 | 131 | |
|
128 | 132 | |
|
129 | 133 | class UserCreationError(Exception): |
|
130 | 134 | pass |
|
131 | 135 | |
|
132 | 136 | |
|
133 | 137 | class NotAllowedToCreateUserError(Exception): |
|
134 | 138 | pass |
|
135 | 139 | |
|
136 | 140 | |
|
137 | 141 | class RepositoryCreationError(Exception): |
|
138 | 142 | pass |
|
139 | 143 | |
|
140 | 144 | |
|
141 | 145 | class VCSServerUnavailable(HTTPBadGateway): |
|
142 | 146 | """ HTTP Exception class for VCS Server errors """ |
|
143 | 147 | code = 502 |
|
144 | 148 | title = 'VCS Server Error' |
|
145 | 149 | causes = [ |
|
146 | 150 | 'VCS Server is not running', |
|
147 | 151 | 'Incorrect vcs.server=host:port', |
|
148 | 152 | 'Incorrect vcs.server.protocol', |
|
149 | 153 | ] |
|
150 | 154 | |
|
151 | 155 | def __init__(self, message=''): |
|
152 | 156 | self.explanation = 'Could not connect to VCS Server' |
|
153 | 157 | if message: |
|
154 | 158 | self.explanation += ': ' + message |
|
155 | 159 | super(VCSServerUnavailable, self).__init__() |
@@ -1,1047 +1,1053 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import re |
|
23 | 23 | import shutil |
|
24 | 24 | import time |
|
25 | 25 | import logging |
|
26 | 26 | import traceback |
|
27 | 27 | import datetime |
|
28 | 28 | |
|
29 | 29 | from pyramid.threadlocal import get_current_request |
|
30 | 30 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
31 | 31 | |
|
32 | 32 | from rhodecode import events |
|
33 | 33 | from rhodecode.lib.auth import HasUserGroupPermissionAny |
|
34 | 34 | from rhodecode.lib.caching_query import FromCache |
|
35 | from rhodecode.lib.exceptions import AttachedForksError | |
|
35 | from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError | |
|
36 | 36 | from rhodecode.lib.hooks_base import log_delete_repository |
|
37 | 37 | from rhodecode.lib.user_log_filter import user_log_filter |
|
38 | 38 | from rhodecode.lib.utils import make_db_config |
|
39 | 39 | from rhodecode.lib.utils2 import ( |
|
40 | 40 | safe_str, safe_unicode, remove_prefix, obfuscate_url_pw, |
|
41 | 41 | get_current_rhodecode_user, safe_int, datetime_to_time, |
|
42 | 42 | action_logger_generic) |
|
43 | 43 | from rhodecode.lib.vcs.backends import get_backend |
|
44 | 44 | from rhodecode.model import BaseModel |
|
45 | 45 | from rhodecode.model.db import ( |
|
46 | 46 | _hash_key, joinedload, or_, Repository, UserRepoToPerm, UserGroupRepoToPerm, |
|
47 | 47 | UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission, |
|
48 | 48 | Statistics, UserGroup, RepoGroup, RepositoryField, UserLog) |
|
49 | 49 | |
|
50 | 50 | from rhodecode.model.settings import VcsSettingsModel |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | log = logging.getLogger(__name__) |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | class RepoModel(BaseModel): |
|
57 | 57 | |
|
58 | 58 | cls = Repository |
|
59 | 59 | |
|
60 | 60 | def _get_user_group(self, users_group): |
|
61 | 61 | return self._get_instance(UserGroup, users_group, |
|
62 | 62 | callback=UserGroup.get_by_group_name) |
|
63 | 63 | |
|
64 | 64 | def _get_repo_group(self, repo_group): |
|
65 | 65 | return self._get_instance(RepoGroup, repo_group, |
|
66 | 66 | callback=RepoGroup.get_by_group_name) |
|
67 | 67 | |
|
68 | 68 | def _create_default_perms(self, repository, private): |
|
69 | 69 | # create default permission |
|
70 | 70 | default = 'repository.read' |
|
71 | 71 | def_user = User.get_default_user() |
|
72 | 72 | for p in def_user.user_perms: |
|
73 | 73 | if p.permission.permission_name.startswith('repository.'): |
|
74 | 74 | default = p.permission.permission_name |
|
75 | 75 | break |
|
76 | 76 | |
|
77 | 77 | default_perm = 'repository.none' if private else default |
|
78 | 78 | |
|
79 | 79 | repo_to_perm = UserRepoToPerm() |
|
80 | 80 | repo_to_perm.permission = Permission.get_by_key(default_perm) |
|
81 | 81 | |
|
82 | 82 | repo_to_perm.repository = repository |
|
83 | 83 | repo_to_perm.user_id = def_user.user_id |
|
84 | 84 | |
|
85 | 85 | return repo_to_perm |
|
86 | 86 | |
|
87 | 87 | @LazyProperty |
|
88 | 88 | def repos_path(self): |
|
89 | 89 | """ |
|
90 | 90 | Gets the repositories root path from database |
|
91 | 91 | """ |
|
92 | 92 | settings_model = VcsSettingsModel(sa=self.sa) |
|
93 | 93 | return settings_model.get_repos_location() |
|
94 | 94 | |
|
95 | 95 | def get(self, repo_id): |
|
96 | 96 | repo = self.sa.query(Repository) \ |
|
97 | 97 | .filter(Repository.repo_id == repo_id) |
|
98 | 98 | |
|
99 | 99 | return repo.scalar() |
|
100 | 100 | |
|
101 | 101 | def get_repo(self, repository): |
|
102 | 102 | return self._get_repo(repository) |
|
103 | 103 | |
|
104 | 104 | def get_by_repo_name(self, repo_name, cache=False): |
|
105 | 105 | repo = self.sa.query(Repository) \ |
|
106 | 106 | .filter(Repository.repo_name == repo_name) |
|
107 | 107 | |
|
108 | 108 | if cache: |
|
109 | 109 | name_key = _hash_key(repo_name) |
|
110 | 110 | repo = repo.options( |
|
111 | 111 | FromCache("sql_cache_short", "get_repo_%s" % name_key)) |
|
112 | 112 | return repo.scalar() |
|
113 | 113 | |
|
114 | 114 | def _extract_id_from_repo_name(self, repo_name): |
|
115 | 115 | if repo_name.startswith('/'): |
|
116 | 116 | repo_name = repo_name.lstrip('/') |
|
117 | 117 | by_id_match = re.match(r'^_(\d{1,})', repo_name) |
|
118 | 118 | if by_id_match: |
|
119 | 119 | return by_id_match.groups()[0] |
|
120 | 120 | |
|
121 | 121 | def get_repo_by_id(self, repo_name): |
|
122 | 122 | """ |
|
123 | 123 | Extracts repo_name by id from special urls. |
|
124 | 124 | Example url is _11/repo_name |
|
125 | 125 | |
|
126 | 126 | :param repo_name: |
|
127 | 127 | :return: repo object if matched else None |
|
128 | 128 | """ |
|
129 | 129 | |
|
130 | 130 | try: |
|
131 | 131 | _repo_id = self._extract_id_from_repo_name(repo_name) |
|
132 | 132 | if _repo_id: |
|
133 | 133 | return self.get(_repo_id) |
|
134 | 134 | except Exception: |
|
135 | 135 | log.exception('Failed to extract repo_name from URL') |
|
136 | 136 | |
|
137 | 137 | return None |
|
138 | 138 | |
|
139 | 139 | def get_repos_for_root(self, root, traverse=False): |
|
140 | 140 | if traverse: |
|
141 | 141 | like_expression = u'{}%'.format(safe_unicode(root)) |
|
142 | 142 | repos = Repository.query().filter( |
|
143 | 143 | Repository.repo_name.like(like_expression)).all() |
|
144 | 144 | else: |
|
145 | 145 | if root and not isinstance(root, RepoGroup): |
|
146 | 146 | raise ValueError( |
|
147 | 147 | 'Root must be an instance ' |
|
148 | 148 | 'of RepoGroup, got:{} instead'.format(type(root))) |
|
149 | 149 | repos = Repository.query().filter(Repository.group == root).all() |
|
150 | 150 | return repos |
|
151 | 151 | |
|
152 | 152 | def get_url(self, repo, request=None, permalink=False): |
|
153 | 153 | if not request: |
|
154 | 154 | request = get_current_request() |
|
155 | 155 | |
|
156 | 156 | if not request: |
|
157 | 157 | return |
|
158 | 158 | |
|
159 | 159 | if permalink: |
|
160 | 160 | return request.route_url( |
|
161 | 161 | 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id))) |
|
162 | 162 | else: |
|
163 | 163 | return request.route_url( |
|
164 | 164 | 'repo_summary', repo_name=safe_str(repo.repo_name)) |
|
165 | 165 | |
|
166 | 166 | def get_commit_url(self, repo, commit_id, request=None, permalink=False): |
|
167 | 167 | if not request: |
|
168 | 168 | request = get_current_request() |
|
169 | 169 | |
|
170 | 170 | if not request: |
|
171 | 171 | return |
|
172 | 172 | |
|
173 | 173 | if permalink: |
|
174 | 174 | return request.route_url( |
|
175 | 175 | 'repo_commit', repo_name=safe_str(repo.repo_id), |
|
176 | 176 | commit_id=commit_id) |
|
177 | 177 | |
|
178 | 178 | else: |
|
179 | 179 | return request.route_url( |
|
180 | 180 | 'repo_commit', repo_name=safe_str(repo.repo_name), |
|
181 | 181 | commit_id=commit_id) |
|
182 | 182 | |
|
183 | 183 | def get_repo_log(self, repo, filter_term): |
|
184 | 184 | repo_log = UserLog.query()\ |
|
185 | 185 | .filter(or_(UserLog.repository_id == repo.repo_id, |
|
186 | 186 | UserLog.repository_name == repo.repo_name))\ |
|
187 | 187 | .options(joinedload(UserLog.user))\ |
|
188 | 188 | .options(joinedload(UserLog.repository))\ |
|
189 | 189 | .order_by(UserLog.action_date.desc()) |
|
190 | 190 | |
|
191 | 191 | repo_log = user_log_filter(repo_log, filter_term) |
|
192 | 192 | return repo_log |
|
193 | 193 | |
|
194 | 194 | @classmethod |
|
195 | 195 | def update_repoinfo(cls, repositories=None): |
|
196 | 196 | if not repositories: |
|
197 | 197 | repositories = Repository.getAll() |
|
198 | 198 | for repo in repositories: |
|
199 | 199 | repo.update_commit_cache() |
|
200 | 200 | |
|
201 | 201 | def get_repos_as_dict(self, repo_list=None, admin=False, |
|
202 | 202 | super_user_actions=False): |
|
203 | 203 | _render = get_current_request().get_partial_renderer( |
|
204 | 204 | 'rhodecode:templates/data_table/_dt_elements.mako') |
|
205 | 205 | c = _render.get_call_context() |
|
206 | 206 | |
|
207 | 207 | def quick_menu(repo_name): |
|
208 | 208 | return _render('quick_menu', repo_name) |
|
209 | 209 | |
|
210 | 210 | def repo_lnk(name, rtype, rstate, private, fork_of): |
|
211 | 211 | return _render('repo_name', name, rtype, rstate, private, fork_of, |
|
212 | 212 | short_name=not admin, admin=False) |
|
213 | 213 | |
|
214 | 214 | def last_change(last_change): |
|
215 | 215 | if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo: |
|
216 | 216 | last_change = last_change + datetime.timedelta(seconds= |
|
217 | 217 | (datetime.datetime.now() - datetime.datetime.utcnow()).seconds) |
|
218 | 218 | return _render("last_change", last_change) |
|
219 | 219 | |
|
220 | 220 | def rss_lnk(repo_name): |
|
221 | 221 | return _render("rss", repo_name) |
|
222 | 222 | |
|
223 | 223 | def atom_lnk(repo_name): |
|
224 | 224 | return _render("atom", repo_name) |
|
225 | 225 | |
|
226 | 226 | def last_rev(repo_name, cs_cache): |
|
227 | 227 | return _render('revision', repo_name, cs_cache.get('revision'), |
|
228 | 228 | cs_cache.get('raw_id'), cs_cache.get('author'), |
|
229 | 229 | cs_cache.get('message'), cs_cache.get('date')) |
|
230 | 230 | |
|
231 | 231 | def desc(desc): |
|
232 | 232 | return _render('repo_desc', desc, c.visual.stylify_metatags) |
|
233 | 233 | |
|
234 | 234 | def state(repo_state): |
|
235 | 235 | return _render("repo_state", repo_state) |
|
236 | 236 | |
|
237 | 237 | def repo_actions(repo_name): |
|
238 | 238 | return _render('repo_actions', repo_name, super_user_actions) |
|
239 | 239 | |
|
240 | 240 | def user_profile(username): |
|
241 | 241 | return _render('user_profile', username) |
|
242 | 242 | |
|
243 | 243 | repos_data = [] |
|
244 | 244 | for repo in repo_list: |
|
245 | 245 | cs_cache = repo.changeset_cache |
|
246 | 246 | row = { |
|
247 | 247 | "menu": quick_menu(repo.repo_name), |
|
248 | 248 | |
|
249 | 249 | "name": repo_lnk(repo.repo_name, repo.repo_type, |
|
250 | 250 | repo.repo_state, repo.private, repo.fork), |
|
251 | 251 | "name_raw": repo.repo_name.lower(), |
|
252 | 252 | |
|
253 | 253 | "last_change": last_change(repo.last_db_change), |
|
254 | 254 | "last_change_raw": datetime_to_time(repo.last_db_change), |
|
255 | 255 | |
|
256 | 256 | "last_changeset": last_rev(repo.repo_name, cs_cache), |
|
257 | 257 | "last_changeset_raw": cs_cache.get('revision'), |
|
258 | 258 | |
|
259 | 259 | "desc": desc(repo.description_safe), |
|
260 | 260 | "owner": user_profile(repo.user.username), |
|
261 | 261 | |
|
262 | 262 | "state": state(repo.repo_state), |
|
263 | 263 | "rss": rss_lnk(repo.repo_name), |
|
264 | 264 | |
|
265 | 265 | "atom": atom_lnk(repo.repo_name), |
|
266 | 266 | } |
|
267 | 267 | if admin: |
|
268 | 268 | row.update({ |
|
269 | 269 | "action": repo_actions(repo.repo_name), |
|
270 | 270 | }) |
|
271 | 271 | repos_data.append(row) |
|
272 | 272 | |
|
273 | 273 | return repos_data |
|
274 | 274 | |
|
    def _get_defaults(self, repo_name):
        """
        Gets information about repository, and returns a dict for
        usage in forms

        :param repo_name: full repository name to look up
        :return: dict of form default values, or None when no such repo exists
        """

        repo_info = Repository.get_by_repo_name(repo_name)

        if repo_info is None:
            return None

        defaults = repo_info.get_dict()
        # forms expect the short name, without any group path prefix
        defaults['repo_name'] = repo_info.just_name

        groups = repo_info.groups_with_parents
        parent_group = groups[-1] if groups else None

        # we use -1 as this is how in HTML, we mark an empty group
        defaults['repo_group'] = getattr(parent_group, 'group_id', -1)

        # maps form field name -> model attribute; when 'strip' is set the
        # model attribute is the field name minus the 'repo_' prefix
        keys_to_process = (
            {'k': 'repo_type', 'strip': False},
            {'k': 'repo_enable_downloads', 'strip': True},
            {'k': 'repo_description', 'strip': True},
            {'k': 'repo_enable_locking', 'strip': True},
            {'k': 'repo_landing_rev', 'strip': True},
            {'k': 'clone_uri', 'strip': False},
            {'k': 'push_uri', 'strip': False},
            {'k': 'repo_private', 'strip': True},
            {'k': 'repo_enable_statistics', 'strip': True}
        )

        for item in keys_to_process:
            attr = item['k']
            if item['strip']:
                attr = remove_prefix(item['k'], 'repo_')

            val = defaults[attr]
            if item['k'] == 'repo_landing_rev':
                # landing rev is stored as a pair; the form uses 'type:name'
                val = ':'.join(defaults[attr])
            defaults[item['k']] = val
            if item['k'] == 'clone_uri':
                # also expose the credential-obfuscated variant for display
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
            if item['k'] == 'push_uri':
                defaults['push_uri_hidden'] = repo_info.push_uri_hidden

        # fill owner
        if repo_info.user:
            defaults.update({'user': repo_info.user.username})
        else:
            # owner row is missing; fall back to the first super-admin
            replacement_user = User.get_first_super_admin().username
            defaults.update({'user': replacement_user})

        return defaults
|
331 | 331 | |
|
    def update(self, repo, **kwargs):
        """
        Update repository attributes from submitted form values.

        Handles owner and group changes, plain attribute updates, extra
        fields, the private-flag permission reset, and renames the
        filesystem directory when the repository name changed.

        :param repo: Instance of Repository, repository_id, or repository name
        :param kwargs: form values; must contain 'repo_name', may contain
            'user', 'repo_group', the keys listed in ``update_keys`` and
            prefixed extra-field keys
        :return: the updated Repository instance
        """
        try:
            cur_repo = self._get_repo(repo)
            source_repo_name = cur_repo.repo_name
            if 'user' in kwargs:
                cur_repo.user = User.get_by_username(kwargs['user'])

            if 'repo_group' in kwargs:
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)

            # (strip_flag, form_key); when strip_flag is set, the model
            # attribute name is the form key minus the 'repo_' prefix
            update_keys = [
                (1, 'repo_description'),
                (1, 'repo_landing_rev'),
                (1, 'repo_private'),
                (1, 'repo_enable_downloads'),
                (1, 'repo_enable_locking'),
                (1, 'repo_enable_statistics'),
                (0, 'clone_uri'),
                (0, 'push_uri'),
                (0, 'fork_id')
            ]
            for strip, k in update_keys:
                if k in kwargs:
                    val = kwargs[k]
                    if strip:
                        k = remove_prefix(k, 'repo_')

                    setattr(cur_repo, k, val)

            new_name = cur_repo.get_new_name(kwargs['repo_name'])
            cur_repo.repo_name = new_name

            # if private flag is set, reset default permission to NONE
            if kwargs.get('repo_private'):
                EMPTY_PERM = 'repository.none'
                RepoModel().grant_user_permission(
                    repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
                )

            # handle extra fields
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
                                kwargs):
                k = RepositoryField.un_prefix_key(field)
                ex_field = RepositoryField.get_by_key_name(
                    key=k, repo=cur_repo)
                if ex_field:
                    ex_field.field_value = kwargs[field]
                    self.sa.add(ex_field)
            cur_repo.updated_on = datetime.datetime.now()
            self.sa.add(cur_repo)

            if source_repo_name != new_name:
                # rename repository
                self._rename_filesystem_repo(
                    old=source_repo_name, new=new_name)

            return cur_repo
        except Exception:
            log.error(traceback.format_exc())
            raise
|
393 | 393 | |
|
394 | 394 | def _create_repo(self, repo_name, repo_type, description, owner, |
|
395 | 395 | private=False, clone_uri=None, repo_group=None, |
|
396 | 396 | landing_rev='rev:tip', fork_of=None, |
|
397 | 397 | copy_fork_permissions=False, enable_statistics=False, |
|
398 | 398 | enable_locking=False, enable_downloads=False, |
|
399 | 399 | copy_group_permissions=False, |
|
400 | 400 | state=Repository.STATE_PENDING): |
|
401 | 401 | """ |
|
402 | 402 | Create repository inside database with PENDING state, this should be |
|
403 | 403 | only executed by create() repo. With exception of importing existing |
|
404 | 404 | repos |
|
405 | 405 | """ |
|
406 | 406 | from rhodecode.model.scm import ScmModel |
|
407 | 407 | |
|
408 | 408 | owner = self._get_user(owner) |
|
409 | 409 | fork_of = self._get_repo(fork_of) |
|
410 | 410 | repo_group = self._get_repo_group(safe_int(repo_group)) |
|
411 | 411 | |
|
412 | 412 | try: |
|
413 | 413 | repo_name = safe_unicode(repo_name) |
|
414 | 414 | description = safe_unicode(description) |
|
415 | 415 | # repo name is just a name of repository |
|
416 | 416 | # while repo_name_full is a full qualified name that is combined |
|
417 | 417 | # with name and path of group |
|
418 | 418 | repo_name_full = repo_name |
|
419 | 419 | repo_name = repo_name.split(Repository.NAME_SEP)[-1] |
|
420 | 420 | |
|
421 | 421 | new_repo = Repository() |
|
422 | 422 | new_repo.repo_state = state |
|
423 | 423 | new_repo.enable_statistics = False |
|
424 | 424 | new_repo.repo_name = repo_name_full |
|
425 | 425 | new_repo.repo_type = repo_type |
|
426 | 426 | new_repo.user = owner |
|
427 | 427 | new_repo.group = repo_group |
|
428 | 428 | new_repo.description = description or repo_name |
|
429 | 429 | new_repo.private = private |
|
430 | 430 | new_repo.clone_uri = clone_uri |
|
431 | 431 | new_repo.landing_rev = landing_rev |
|
432 | 432 | |
|
433 | 433 | new_repo.enable_statistics = enable_statistics |
|
434 | 434 | new_repo.enable_locking = enable_locking |
|
435 | 435 | new_repo.enable_downloads = enable_downloads |
|
436 | 436 | |
|
437 | 437 | if repo_group: |
|
438 | 438 | new_repo.enable_locking = repo_group.enable_locking |
|
439 | 439 | |
|
440 | 440 | if fork_of: |
|
441 | 441 | parent_repo = fork_of |
|
442 | 442 | new_repo.fork = parent_repo |
|
443 | 443 | |
|
444 | 444 | events.trigger(events.RepoPreCreateEvent(new_repo)) |
|
445 | 445 | |
|
446 | 446 | self.sa.add(new_repo) |
|
447 | 447 | |
|
448 | 448 | EMPTY_PERM = 'repository.none' |
|
449 | 449 | if fork_of and copy_fork_permissions: |
|
450 | 450 | repo = fork_of |
|
451 | 451 | user_perms = UserRepoToPerm.query() \ |
|
452 | 452 | .filter(UserRepoToPerm.repository == repo).all() |
|
453 | 453 | group_perms = UserGroupRepoToPerm.query() \ |
|
454 | 454 | .filter(UserGroupRepoToPerm.repository == repo).all() |
|
455 | 455 | |
|
456 | 456 | for perm in user_perms: |
|
457 | 457 | UserRepoToPerm.create( |
|
458 | 458 | perm.user, new_repo, perm.permission) |
|
459 | 459 | |
|
460 | 460 | for perm in group_perms: |
|
461 | 461 | UserGroupRepoToPerm.create( |
|
462 | 462 | perm.users_group, new_repo, perm.permission) |
|
463 | 463 | # in case we copy permissions and also set this repo to private |
|
464 | 464 | # override the default user permission to make it a private |
|
465 | 465 | # repo |
|
466 | 466 | if private: |
|
467 | 467 | RepoModel(self.sa).grant_user_permission( |
|
468 | 468 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
469 | 469 | |
|
470 | 470 | elif repo_group and copy_group_permissions: |
|
471 | 471 | user_perms = UserRepoGroupToPerm.query() \ |
|
472 | 472 | .filter(UserRepoGroupToPerm.group == repo_group).all() |
|
473 | 473 | |
|
474 | 474 | group_perms = UserGroupRepoGroupToPerm.query() \ |
|
475 | 475 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() |
|
476 | 476 | |
|
477 | 477 | for perm in user_perms: |
|
478 | 478 | perm_name = perm.permission.permission_name.replace( |
|
479 | 479 | 'group.', 'repository.') |
|
480 | 480 | perm_obj = Permission.get_by_key(perm_name) |
|
481 | 481 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) |
|
482 | 482 | |
|
483 | 483 | for perm in group_perms: |
|
484 | 484 | perm_name = perm.permission.permission_name.replace( |
|
485 | 485 | 'group.', 'repository.') |
|
486 | 486 | perm_obj = Permission.get_by_key(perm_name) |
|
487 | 487 | UserGroupRepoToPerm.create( |
|
488 | 488 | perm.users_group, new_repo, perm_obj) |
|
489 | 489 | |
|
490 | 490 | if private: |
|
491 | 491 | RepoModel(self.sa).grant_user_permission( |
|
492 | 492 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
493 | 493 | |
|
494 | 494 | else: |
|
495 | 495 | perm_obj = self._create_default_perms(new_repo, private) |
|
496 | 496 | self.sa.add(perm_obj) |
|
497 | 497 | |
|
498 | 498 | # now automatically start following this repository as owner |
|
499 | 499 | ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, |
|
500 | 500 | owner.user_id) |
|
501 | 501 | |
|
502 | 502 | # we need to flush here, in order to check if database won't |
|
503 | 503 | # throw any exceptions, create filesystem dirs at the very end |
|
504 | 504 | self.sa.flush() |
|
505 | 505 | events.trigger(events.RepoCreateEvent(new_repo)) |
|
506 | 506 | return new_repo |
|
507 | 507 | |
|
508 | 508 | except Exception: |
|
509 | 509 | log.error(traceback.format_exc()) |
|
510 | 510 | raise |
|
511 | 511 | |
|
512 | 512 | def create(self, form_data, cur_user): |
|
513 | 513 | """ |
|
514 | 514 | Create repository using celery tasks |
|
515 | 515 | |
|
516 | 516 | :param form_data: |
|
517 | 517 | :param cur_user: |
|
518 | 518 | """ |
|
519 | 519 | from rhodecode.lib.celerylib import tasks, run_task |
|
520 | 520 | return run_task(tasks.create_repo, form_data, cur_user) |
|
521 | 521 | |
|
    def update_permissions(self, repo, perm_additions=None, perm_updates=None,
                           perm_deletions=None, check_perms=True,
                           cur_user=None):
        """
        Apply permission additions, updates and deletions on a repository
        for users and user groups.

        Each entry in the three lists is a (member_id, permission,
        member_type) tuple where member_type is 'user' or 'user_group'.

        :param repo: repository to change permissions on
        :param perm_additions: permissions to create
        :param perm_updates: permissions to update in place
        :param perm_deletions: permissions to remove
        :param check_perms: when True, verify cur_user may alter a referenced
            user group before touching it
        :param cur_user: user performing the change
        :return: dict with 'added', 'updated' and 'deleted' change entries
        :raises ValueError: on an unknown member_type
        """
        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        # permissions cur_user needs on a user group to be allowed to use it
        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        changes = {
            'added': [],
            'updated': [],
            'deleted': []
        }
        # update permissions
        for member_id, perm, member_type in perm_updates:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                # this updates also current one if found
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))
            changes['updated'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})

        # set new permissions
        for member_id, perm, member_type in perm_additions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))

            changes['added'].append({'type': member_type, 'id': member_id,
                                     'name': member_name, 'new_perm': perm})
        # delete permissions
        for member_id, perm, member_type in perm_deletions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.revoke_user_permission(repo=repo, user=member_id)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.revoke_user_group_permission(
                        repo=repo, group_name=member_id)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))

            changes['deleted'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})
        return changes
|
600 | 600 | |
|
601 | 601 | def create_fork(self, form_data, cur_user): |
|
602 | 602 | """ |
|
603 | 603 | Simple wrapper into executing celery task for fork creation |
|
604 | 604 | |
|
605 | 605 | :param form_data: |
|
606 | 606 | :param cur_user: |
|
607 | 607 | """ |
|
608 | 608 | from rhodecode.lib.celerylib import tasks, run_task |
|
609 | 609 | return run_task(tasks.create_repo_fork, form_data, cur_user) |
|
610 | 610 | |
|
    def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
        """
        Delete given repository, forks parameter defines what to do with
        attached forks. Throws AttachedForksError if deleted repo has attached
        forks, and AttachedPullRequestsError if it has pull requests attached
        while `pull_requests` is not 'delete'.

        :param repo: Instance of Repository, repository_id, or repository name
        :param forks: str 'delete' or 'detach'
        :param pull_requests: str 'delete'; any other value raises when the
            repo is the source or target of existing pull requests
        :param fs_remove: remove(archive) repo from filesystem
        :param cur_user: user performing the deletion, recorded in audit log
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)
        repo = self._get_repo(repo)
        if repo:
            if forks == 'detach':
                # keep the forks alive, just unlink them from this parent
                for r in repo.forks:
                    r.fork = None
                    self.sa.add(r)
            elif forks == 'delete':
                # recursively delete all forks as well
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                raise AttachedForksError()

            # check for pull requests
            pr_sources = repo.pull_requests_source
            pr_targets = repo.pull_requests_target
            if pull_requests != 'delete' and (pr_sources or pr_targets):
                raise AttachedPullRequestsError()

            old_repo_dict = repo.get_dict()
            events.trigger(events.RepoPreDeleteEvent(repo))
            try:
                self.sa.delete(repo)
                if fs_remove:
                    self._delete_filesystem_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                old_repo_dict.update({
                    'deleted_by': cur_user,
                    'deleted_on': time.time(),
                })
                log_delete_repository(**old_repo_dict)
                events.trigger(events.RepoDeleteEvent(repo))
            except Exception:
                log.error(traceback.format_exc())
                raise
|
652 | 658 | |
|
    def grant_user_permission(self, repo, user, perm):
        """
        Grant permission for user on given repository, or update existing one
        if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        :param perm: Instance of Permission, or permission_name
        :return: the created or updated UserRepoToPerm row
        """
        user = self._get_user(user)
        repo = self._get_repo(repo)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserRepoToPerm) \
            .filter(UserRepoToPerm.user == user) \
            .filter(UserRepoToPerm.repository == repo) \
            .scalar()
        if obj is None:
            # create new !
            obj = UserRepoToPerm()
        # assign/overwrite in either case so an existing row gets the new perm
        obj.repository = repo
        obj.user = user
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, user, repo)
        action_logger_generic(
            'granted permission: {} to user: {} on repo: {}'.format(
                perm, user, repo), namespace='security.repo')
        return obj
|
683 | 689 | |
|
684 | 690 | def revoke_user_permission(self, repo, user): |
|
685 | 691 | """ |
|
686 | 692 | Revoke permission for user on given repository |
|
687 | 693 | |
|
688 | 694 | :param repo: Instance of Repository, repository_id, or repository name |
|
689 | 695 | :param user: Instance of User, user_id or username |
|
690 | 696 | """ |
|
691 | 697 | |
|
692 | 698 | user = self._get_user(user) |
|
693 | 699 | repo = self._get_repo(repo) |
|
694 | 700 | |
|
695 | 701 | obj = self.sa.query(UserRepoToPerm) \ |
|
696 | 702 | .filter(UserRepoToPerm.repository == repo) \ |
|
697 | 703 | .filter(UserRepoToPerm.user == user) \ |
|
698 | 704 | .scalar() |
|
699 | 705 | if obj: |
|
700 | 706 | self.sa.delete(obj) |
|
701 | 707 | log.debug('Revoked perm on %s on %s', repo, user) |
|
702 | 708 | action_logger_generic( |
|
703 | 709 | 'revoked permission from user: {} on repo: {}'.format( |
|
704 | 710 | user, repo), namespace='security.repo') |
|
705 | 711 | |
|
    def grant_user_group_permission(self, repo, group_name, perm):
        """
        Grant permission for user group on given repository, or update
        existing one if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        :param perm: Instance of Permission, or permission_name
        :return: the created or updated UserGroupRepoToPerm row
        """
        repo = self._get_repo(repo)
        group_name = self._get_user_group(group_name)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserGroupRepoToPerm) \
            .filter(UserGroupRepoToPerm.users_group == group_name) \
            .filter(UserGroupRepoToPerm.repository == repo) \
            .scalar()

        if obj is None:
            # create new
            obj = UserGroupRepoToPerm()

        # assign/overwrite in either case so an existing row gets the new perm
        obj.repository = repo
        obj.users_group = group_name
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
        action_logger_generic(
            'granted permission: {} to usergroup: {} on repo: {}'.format(
                perm, group_name, repo), namespace='security.repo')

        return obj
|
740 | 746 | |
|
741 | 747 | def revoke_user_group_permission(self, repo, group_name): |
|
742 | 748 | """ |
|
743 | 749 | Revoke permission for user group on given repository |
|
744 | 750 | |
|
745 | 751 | :param repo: Instance of Repository, repository_id, or repository name |
|
746 | 752 | :param group_name: Instance of UserGroup, users_group_id, |
|
747 | 753 | or user group name |
|
748 | 754 | """ |
|
749 | 755 | repo = self._get_repo(repo) |
|
750 | 756 | group_name = self._get_user_group(group_name) |
|
751 | 757 | |
|
752 | 758 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
753 | 759 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
754 | 760 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
755 | 761 | .scalar() |
|
756 | 762 | if obj: |
|
757 | 763 | self.sa.delete(obj) |
|
758 | 764 | log.debug('Revoked perm to %s on %s', repo, group_name) |
|
759 | 765 | action_logger_generic( |
|
760 | 766 | 'revoked permission from usergroup: {} on repo: {}'.format( |
|
761 | 767 | group_name, repo), namespace='security.repo') |
|
762 | 768 | |
|
763 | 769 | def delete_stats(self, repo_name): |
|
764 | 770 | """ |
|
765 | 771 | removes stats for given repo |
|
766 | 772 | |
|
767 | 773 | :param repo_name: |
|
768 | 774 | """ |
|
769 | 775 | repo = self._get_repo(repo_name) |
|
770 | 776 | try: |
|
771 | 777 | obj = self.sa.query(Statistics) \ |
|
772 | 778 | .filter(Statistics.repository == repo).scalar() |
|
773 | 779 | if obj: |
|
774 | 780 | self.sa.delete(obj) |
|
775 | 781 | except Exception: |
|
776 | 782 | log.error(traceback.format_exc()) |
|
777 | 783 | raise |
|
778 | 784 | |
|
779 | 785 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', |
|
780 | 786 | field_type='str', field_desc=''): |
|
781 | 787 | |
|
782 | 788 | repo = self._get_repo(repo_name) |
|
783 | 789 | |
|
784 | 790 | new_field = RepositoryField() |
|
785 | 791 | new_field.repository = repo |
|
786 | 792 | new_field.field_key = field_key |
|
787 | 793 | new_field.field_type = field_type # python type |
|
788 | 794 | new_field.field_value = field_value |
|
789 | 795 | new_field.field_desc = field_desc |
|
790 | 796 | new_field.field_label = field_label |
|
791 | 797 | self.sa.add(new_field) |
|
792 | 798 | return new_field |
|
793 | 799 | |
|
794 | 800 | def delete_repo_field(self, repo_name, field_key): |
|
795 | 801 | repo = self._get_repo(repo_name) |
|
796 | 802 | field = RepositoryField.get_by_key_name(field_key, repo) |
|
797 | 803 | if field: |
|
798 | 804 | self.sa.delete(field) |
|
799 | 805 | |
|
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
                                clone_uri=None, repo_store_location=None,
                                use_global_config=False):
        """
        makes repository on filesystem. It's group aware means it'll create
        a repository within a group, and alter the paths accordingly of
        group location

        :param repo_name: short repository name (must not contain group path)
        :param repo_type: backend alias, e.g. 'git' or 'hg'
        :param repo_group: RepoGroup instance or group path string
        :param clone_uri: optional remote URI to clone from
        :param repo_store_location: optional explicit path overriding the
            default repos_path/group/name layout
        :param use_global_config: when True, don't scope config to this repo
        """
        from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
        from rhodecode.model.scm import ScmModel

        if Repository.NAME_SEP in repo_name:
            raise ValueError(
                'repo_name must not contain groups got `%s`' % repo_name)

        if isinstance(repo_group, RepoGroup):
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
        else:
            new_parent_path = repo_group or ''

        if repo_store_location:
            _paths = [repo_store_location]
        else:
            _paths = [self.repos_path, new_parent_path, repo_name]
        # we need to make it str for mercurial
        repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))

        # check if this path is not a repository
        if is_valid_repo(repo_path, self.repos_path):
            raise Exception('This path %s is a valid repository' % repo_path)

        # check if this path is a group
        if is_valid_repo_group(repo_path, self.repos_path):
            raise Exception('This path %s is a valid group' % repo_path)

        log.info('creating repo %s in %s from url: `%s`',
                 repo_name, safe_unicode(repo_path),
                 obfuscate_url_pw(clone_uri))

        backend = get_backend(repo_type)

        # scope VCS config to this repo unless global config was requested
        config_repo = None if use_global_config else repo_name
        if config_repo and new_parent_path:
            config_repo = Repository.NAME_SEP.join(
                (new_parent_path, config_repo))
        config = make_db_config(clear_session=False, repo=config_repo)
        config.set('extensions', 'largefiles', '')

        # patch and reset hooks section of UI config to not run any
        # hooks on creating remote repo
        config.clear_section('hooks')

        # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
        if repo_type == 'git':
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri,
                bare=True)
        else:
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri)

        repo.install_hooks()

        log.debug('Created repo %s with %s backend',
                  safe_unicode(repo_name), safe_unicode(repo_type))
        return repo
|
872 | 878 | |
|
873 | 879 | def _rename_filesystem_repo(self, old, new): |
|
874 | 880 | """ |
|
875 | 881 | renames repository on filesystem |
|
876 | 882 | |
|
877 | 883 | :param old: old name |
|
878 | 884 | :param new: new name |
|
879 | 885 | """ |
|
880 | 886 | log.info('renaming repo from %s to %s', old, new) |
|
881 | 887 | |
|
882 | 888 | old_path = os.path.join(self.repos_path, old) |
|
883 | 889 | new_path = os.path.join(self.repos_path, new) |
|
884 | 890 | if os.path.isdir(new_path): |
|
885 | 891 | raise Exception( |
|
886 | 892 | 'Was trying to rename to already existing dir %s' % new_path |
|
887 | 893 | ) |
|
888 | 894 | shutil.move(old_path, new_path) |
|
889 | 895 | |
|
    def _delete_filesystem_repo(self, repo):
        """
        removes repo from filesystem, the removal is actually made by
        added rm__ prefix into dir, and rename internal .hg/.git dirs so this
        repository is no longer valid for rhodecode, can be undeleted later on
        by reverting the renames on this repository

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("Removing repository %s", rm_path)
        # disable hg/git internal that it doesn't get detected as repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        # build a unique removal dir name: rm__<timestamp>__<repo name>
        _now = datetime.datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo_group:
            # if repository is in group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))

        # finally cleanup diff-cache if it exists
        cached_diffs_dir = repo.cached_diffs_dir
        if os.path.isdir(cached_diffs_dir):
            shutil.rmtree(cached_diffs_dir)
|
932 | 938 | |
|
933 | 939 | |
|
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    different.
    """

    readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
    path_re = re.compile(r'^docs?', re.IGNORECASE)

    # lower number == higher priority; keyed by file extension (with dot)
    default_priorities = {
        None: 0,
        '.text': 2,
        '.txt': 3,
        '.rst': 1,
        '.rest': 2,
        '.md': 1,
        '.mkdn': 2,
        '.mdown': 3,
        '.markdown': 4,
    }

    path_priority = {
        'doc': 0,
        'docs': 1,
    }

    FALLBACK_PRIORITY = 99

    RENDERER_TO_EXTENSION = {
        'rst': ['.rst', '.rest'],
        # BUGFIX: '.mkdn' was previously listed without its leading dot
        # ('mkdn'), so .mkdn readmes never matched the markdown renderer
        # preference in _priority().
        'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
    }

    def __init__(self, default_renderer=None):
        self._default_renderer = default_renderer
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
            default_renderer, [])

    def search(self, commit, path='/'):
        """
        Find a readme in the given `commit`, recursing into doc-like
        subdirectories when the current path has no readme itself.
        """
        nodes = commit.get_nodes(path)
        matches = self._match_readmes(nodes)
        matches = self._sort_according_to_priority(matches)
        if matches:
            return matches[0].node

        # no readme here; descend into 'doc'/'docs'-style subdirectories
        paths = self._match_paths(nodes)
        paths = self._sort_paths_according_to_priority(paths)
        for path in paths:
            match = self.search(commit, path=path)
            if match:
                return match

        return None

    def _match_readmes(self, nodes):
        # yield a ReadmeMatch for every file node named like a readme
        for node in nodes:
            if not node.is_file():
                continue
            path = node.path.rsplit('/', 1)[-1]
            match = self.readme_re.match(path)
            if match:
                extension = match.group(1)
                yield ReadmeMatch(node, match, self._priority(extension))

    def _match_paths(self, nodes):
        # yield paths of directory nodes that look like documentation dirs
        for node in nodes:
            if not node.is_dir():
                continue
            match = self.path_re.match(node.path)
            if match:
                yield node.path

    def _priority(self, extension):
        """Return (renderer_priority, extension_priority) — lower wins."""
        renderer_priority = (
            0 if extension in self._renderer_extensions else 1)
        extension_priority = self.default_priorities.get(
            extension, self.FALLBACK_PRIORITY)
        return (renderer_priority, extension_priority)

    def _sort_according_to_priority(self, matches):

        def priority_and_path(match):
            return (match.priority, match.path)

        return sorted(matches, key=priority_and_path)

    def _sort_paths_according_to_priority(self, paths):

        def priority_and_path(path):
            return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)

        return sorted(paths, key=priority_and_path)
|
1033 | 1039 | |
|
1034 | 1040 | |
|
class ReadmeMatch:
    """
    A readme candidate: the file node together with the regex match that
    identified it and its computed sort priority.
    """

    def __init__(self, node, match, priority):
        self.node = node        # file node of the readme candidate
        self._match = match     # regex match object from ReadmeFinder
        self.priority = priority

    @property
    def path(self):
        """Path of the underlying node."""
        return self.node.path

    def __repr__(self):
        # BUGFIX: the original format string was missing the closing '>'
        return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
@@ -1,211 +1,225 b'' | |||
|
<%namespace name="base" file="/base/base.mako"/>

## Summary rows for the "Repository" info panel rendered by dt_info_panel below.
<%
 elems = [
    (_('Owner'), lambda:base.gravatar_with_user(c.rhodecode_db_repo.user.email), '', ''),
    (_('Created on'), h.format_date(c.rhodecode_db_repo.created_on), '', ''),
    (_('Updated on'), h.format_date(c.rhodecode_db_repo.updated_on), '', ''),
    (_('Cached Commit id'), lambda: h.link_to(c.rhodecode_db_repo.changeset_cache.get('short_id'), h.route_path('repo_commit',repo_name=c.repo_name,commit_id=c.rhodecode_db_repo.changeset_cache.get('raw_id'))), '', ''),
    (_('Attached scoped tokens'), len(c.rhodecode_db_repo.scoped_tokens), '', [x.user for x in c.rhodecode_db_repo.scoped_tokens]),
    (_('Pull requests source'), len(c.rhodecode_db_repo.pull_requests_source), '', ['pr_id:{}, repo:{}'.format(x.pull_request_id,x.source_repo.repo_name) for x in c.rhodecode_db_repo.pull_requests_source]),
    (_('Pull requests target'), len(c.rhodecode_db_repo.pull_requests_target), '', ['pr_id:{}, repo:{}'.format(x.pull_request_id,x.target_repo.repo_name) for x in c.rhodecode_db_repo.pull_requests_target]),
 ]
%>

## Panel: repository information summary.
<div class="panel panel-default">
    <div class="panel-heading" id="advanced-info" >
        <h3 class="panel-title">${_('Repository: %s') % c.rhodecode_db_repo.repo_name} <a class="permalink" href="#advanced-info"> &para;</a></h3>
    </div>
    <div class="panel-body">
        ${base.dt_info_panel(elems)}
    </div>
</div>


## Panel: view/set/remove the fork-parent reference of this repository.
<div class="panel panel-default">
    <div class="panel-heading" id="advanced-fork">
        <h3 class="panel-title">${_('Fork Reference')} <a class="permalink" href="#advanced-fork"> &para;</a></h3>
    </div>
    <div class="panel-body">
        ${h.secure_form(h.route_path('edit_repo_advanced_fork', repo_name=c.rhodecode_db_repo.repo_name), request=request)}

        % if c.rhodecode_db_repo.fork:
            <div class="panel-body-title-text">${h.literal(_('This repository is a fork of %(repo_link)s') % {'repo_link': h.link_to_if(c.has_origin_repo_read_perm,c.rhodecode_db_repo.fork.repo_name, h.route_path('repo_summary', repo_name=c.rhodecode_db_repo.fork.repo_name))})}
             | <button class="btn btn-link btn-danger" type="submit">Remove fork reference</button></div>
        % endif

        <div class="field">
            ## select2 widget below fills this hidden input with the chosen repo id
            ${h.hidden('id_fork_of')}
            ${h.submit('set_as_fork_%s' % c.rhodecode_db_repo.repo_name,_('Set'),class_="btn btn-small",)}
        </div>
        <div class="field">
            <span class="help-block">${_('Manually set this repository as a fork of another from the list')}</span>
        </div>
        ${h.end_form()}
    </div>
</div>


## Panel: toggle whether repo actions appear in the public journal.
<div class="panel panel-default">
    <div class="panel-heading" id="advanced-journal">
        <h3 class="panel-title">${_('Public Journal Visibility')} <a class="permalink" href="#advanced-journal"> &para;</a></h3>
    </div>
    <div class="panel-body">
        ${h.secure_form(h.route_path('edit_repo_advanced_journal', repo_name=c.rhodecode_db_repo.repo_name), request=request)}
        <div class="field">
        %if c.in_public_journal:
            <button class="btn btn-small" type="submit">
                ${_('Remove from Public Journal')}
            </button>
        %else:
            <button class="btn btn-small" type="submit">
                ${_('Add to Public Journal')}
            </button>
        %endif
        </div>
        <div class="field" >
            <span class="help-block">${_('All actions made on this repository will be visible to everyone following the public journal.')}</span>
        </div>
        ${h.end_form()}
    </div>
</div>


## Panel: show current lock holder and lock/unlock the repository.
## locked is a (user_id, lock_time, lock_reason) triple; locked[0] falsy == unlocked.
<div class="panel panel-default">
    <div class="panel-heading" id="advanced-locking">
        <h3 class="panel-title">${_('Locking state')} <a class="permalink" href="#advanced-locking"> &para;</a></h3>
    </div>
    <div class="panel-body">
        ${h.secure_form(h.route_path('edit_repo_advanced_locking', repo_name=c.rhodecode_db_repo.repo_name), request=request)}

        %if c.rhodecode_db_repo.locked[0]:
         <div class="panel-body-title-text">${'Locked by %s on %s. Lock reason: %s' % (h.person_by_id(c.rhodecode_db_repo.locked[0]),
            h.format_date(h. time_to_datetime(c.rhodecode_db_repo.locked[1])), c.rhodecode_db_repo.locked[2])}</div>
        %else:
         <div class="panel-body-title-text">${_('This Repository is not currently locked.')}</div>
        %endif

        <div class="field" >
            %if c.rhodecode_db_repo.locked[0]:
                ${h.hidden('set_unlock', '1')}
                <button class="btn btn-small" type="submit"
                        onclick="return confirm('${_('Confirm to unlock repository.')}');">
                    <i class="icon-unlock"></i>
                    ${_('Unlock repository')}
                </button>
            %else:
                ${h.hidden('set_lock', '1')}
                <button class="btn btn-small" type="submit"
                        onclick="return confirm('${_('Confirm to lock repository.')}');">
                    <i class="icon-lock"></i>
                    ${_('Lock Repository')}
                </button>
            %endif
        </div>
        <div class="field" >
            <span class="help-block">
                ${_('Force repository locking. This only works when anonymous access is disabled. Pulling from the repository locks the repository to that user until the same user pushes to that repository again.')}
            </span>
        </div>
        ${h.end_form()}
    </div>
</div>

## Panel: delete the repository, with fork handling and attached-PR warning.
<div class="panel panel-danger">
    <div class="panel-heading" id="advanced-delete">
        <h3 class="panel-title">${_('Delete repository')} <a class="permalink" href="#advanced-delete"> &para;</a></h3>
    </div>
    <div class="panel-body">
        ${h.secure_form(h.route_path('edit_repo_advanced_delete', repo_name=c.repo_name), request=request)}
        <table class="display">
            <tr>
                <td>
                    ${_ungettext('This repository has %s fork.', 'This repository has %s forks.', c.rhodecode_db_repo.forks.count()) % c.rhodecode_db_repo.forks.count()}
                </td>
                <td>
                    %if c.rhodecode_db_repo.forks.count():
                        <input type="radio" name="forks" value="detach_forks" checked="checked"/> <label for="forks">${_('Detach forks')}</label>
                    %endif
                </td>
                <td>
                    %if c.rhodecode_db_repo.forks.count():
                        <input type="radio" name="forks" value="delete_forks"/> <label for="forks">${_('Delete forks')}</label>
                    %endif
                </td>
            </tr>
            ## warn about pull requests attached to this repo (either side)
            <% attached_prs = len(c.rhodecode_db_repo.pull_requests_source + c.rhodecode_db_repo.pull_requests_target) %>
            % if c.rhodecode_db_repo.pull_requests_source or c.rhodecode_db_repo.pull_requests_target:
            <tr>
                <td>
                    ${_ungettext('This repository has %s attached pull request.', 'This repository has %s attached pull requests.', attached_prs) % attached_prs}
                    <br/>
                    ${_('Consider to archive this repository instead.')}
                </td>
                <td></td>
                <td></td>
            </tr>
            % endif
        </table>
        <div style="margin: 0 0 20px 0" class="fake-space"></div>

        <div class="field">
            <button class="btn btn-small btn-danger" type="submit"
                    onclick="return confirm('${_('Confirm to delete this repository: %s') % c.repo_name}');">
                <i class="icon-remove-sign"></i>
                ${_('Delete This Repository')}
            </button>
        </div>
        <div class="field">
            <span class="help-block">
                ${_('This repository will be renamed in a special way in order to make it inaccessible to RhodeCode Enterprise and its VCS systems. If you need to fully delete it from the file system, please do it manually, or with rhodecode-cleanup-repos command available in rhodecode-tools.')}
            </span>
        </div>

        ${h.end_form()}
    </div>
</div>


<script>

var currentRepoId = ${c.rhodecode_db_repo.repo_id};

var repoTypeFilter = function(data) {
    var results = [];

    if (!data.results[0]) {
        return data
    }

    $.each(data.results[0].children, function() {
        // filter out the SAME repo, it cannot be used as fork of itself
        if (this.repo_id != currentRepoId) {
            this.id = this.repo_id;
            results.push(this)
        }
    });
    data.results[0].children = results;
    return data;
};

## fork-parent picker: ajax-backed select2 with per-term result caching
$("#id_fork_of").select2({
    cachedDataSource: {},
    minimumInputLength: 2,
    placeholder: "${_('Change repository') if c.rhodecode_db_repo.fork else _('Pick repository')}",
    dropdownAutoWidth: true,
    containerCssClass: "drop-menu",
    dropdownCssClass: "drop-menu-dropdown",
    formatResult: formatRepoResult,
    query: $.debounce(250, function(query){
        self = this;
        var cacheKey = query.term;
        var cachedData = self.cachedDataSource[cacheKey];

        if (cachedData) {
            query.callback({results: cachedData.results});
        } else {
            $.ajax({
                url: pyroutes.url('repo_list_data'),
                data: {'query': query.term, repo_type: '${c.rhodecode_db_repo.repo_type}'},
                dataType: 'json',
                type: 'GET',
                success: function(data) {
                    data = repoTypeFilter(data);
                    self.cachedDataSource[cacheKey] = data;
                    query.callback({results: data.results});
                },
                error: function(data, textStatus, errorThrown) {
                    alert("Error while fetching entries.\nError code {0} ({1}).".format(data.status, data.statusText));
                }
            })
        }
    })
});
</script>
|
211 | 225 |
@@ -1,349 +1,349 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Helpers for fixture generation |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import os |
|
26 | 26 | import time |
|
27 | 27 | import tempfile |
|
28 | 28 | import shutil |
|
29 | 29 | |
|
30 | 30 | import configobj |
|
31 | 31 | |
|
32 | 32 | from rhodecode.tests import * |
|
33 | 33 | from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist, UserEmailMap |
|
34 | 34 | from rhodecode.model.meta import Session |
|
35 | 35 | from rhodecode.model.repo import RepoModel |
|
36 | 36 | from rhodecode.model.user import UserModel |
|
37 | 37 | from rhodecode.model.repo_group import RepoGroupModel |
|
38 | 38 | from rhodecode.model.user_group import UserGroupModel |
|
39 | 39 | from rhodecode.model.gist import GistModel |
|
40 | 40 | from rhodecode.model.auth_token import AuthTokenModel |
|
41 | 41 | |
|
# shorthand used to climb directory levels in the FIXTURES path below
dn = os.path.dirname
# absolute path to the directory that holds on-disk test fixture files
FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures')
|
44 | 44 | |
|
45 | 45 | |
|
def error_function(*args, **kwargs):
    """Unconditionally raise; used to simulate a crashing callable in tests.

    All positional and keyword arguments are accepted and ignored.
    """
    message = 'Total Crash !'
    raise Exception(message)
|
48 | 48 | |
|
49 | 49 | |
|
class TestINI(object):
    """
    Allows to create a new test.ini file as a copy of existing one with edited
    data. Example usage::

        with TestINI('test.ini', [{'section':{'key':val'}]) as new_test_ini_path:
            print('paster server %s' % new_test_ini)
    """

    def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT',
                 destroy=True, dir=None):
        self.ini_file_path = ini_file_path  # source .ini to copy from
        self.ini_params = ini_params  # list of {section: {key: value}} overrides
        self.new_path = None  # path of the generated file; set by create()
        self.new_path_prefix = new_file_prefix
        self._destroy = destroy  # remove the generated file in destroy()
        self._dir = dir  # directory for the temp file (None = system default)

    def __enter__(self):
        return self.create()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.destroy()

    def create(self):
        """Write the edited copy to a NamedTemporaryFile and return its path.

        :raises IOError: if ``ini_file_path`` does not exist (file_error=True).
        """
        config = configobj.ConfigObj(
            self.ini_file_path, file_error=True, write_empty_values=True)

        for data in self.ini_params:
            # list(...) keeps this working on py3; .items()[0] was py2-only
            section, ini_params = list(data.items())[0]
            for key, val in ini_params.items():
                config[section][key] = val
        with tempfile.NamedTemporaryFile(
                prefix=self.new_path_prefix, suffix='.ini', dir=self._dir,
                delete=False) as new_ini_file:
            config.write(new_ini_file)
            self.new_path = new_ini_file.name

        return self.new_path

    def destroy(self):
        """Remove the generated file, if one was created and destroy is on."""
        # guard on new_path: destroy() may run even if create() never did,
        # in which case there is nothing to remove (and os.remove(None) raises)
        if self._destroy and self.new_path:
            os.remove(self.new_path)
|
93 | 93 | |
|
94 | 94 | |
|
class Fixture(object):
    """Factory helpers for tests: create and destroy repositories, users,
    groups and gists directly through the model layer, committing the DB
    session after each mutation."""

    def anon_access(self, status):
        """
        Context process for disabling anonymous access. use like:
        fixture = Fixture()
        with fixture.anon_access(False):
            #tests

        after this block anon access will be set to `not status`
        """

        class context(object):
            def __enter__(self):
                anon = User.get_default_user()
                anon.active = status
                Session().add(anon)
                Session().commit()
                time.sleep(1.5)  # must sleep for cache (1s to expire)

            def __exit__(self, exc_type, exc_val, exc_tb):
                anon = User.get_default_user()
                anon.active = not status
                Session().add(anon)
                Session().commit()

        return context()

    def _get_repo_create_params(self, **custom):
        # Default repo-creation form values; **custom overrides any of them.
        defs = {
            'repo_name': None,
            'repo_type': 'hg',
            'clone_uri': '',
            'push_uri': '',
            'repo_group': '-1',
            'repo_description': 'DESC',
            'repo_private': False,
            'repo_landing_rev': 'rev:tip',
            'repo_copy_permissions': False,
            'repo_state': Repository.STATE_CREATED,
        }
        defs.update(custom)
        if 'repo_name_full' not in custom:
            defs.update({'repo_name_full': defs['repo_name']})

        # fix the repo name if passed as repo_name_full
        if defs['repo_name']:
            defs['repo_name'] = defs['repo_name'].split('/')[-1]

        return defs

    def _get_group_create_params(self, **custom):
        # Default repo-group-creation form values; **custom overrides them.
        defs = {
            'group_name': None,
            'group_description': 'DESC',
            'perm_updates': [],
            'perm_additions': [],
            'perm_deletions': [],
            'group_parent_id': -1,
            'enable_locking': False,
            'recursive': False,
        }
        defs.update(custom)

        return defs

    def _get_user_create_params(self, name, **custom):
        # Default user-creation form values; **custom overrides them.
        defs = {
            'username': name,
            'password': 'qweqwe',
            'email': '%s+test@rhodecode.org' % name,
            'firstname': 'TestUser',
            'lastname': 'Test',
            'active': True,
            'admin': False,
            'extern_type': 'rhodecode',
            'extern_name': None,
        }
        defs.update(custom)

        return defs

    def _get_user_group_create_params(self, name, **custom):
        # Default user-group-creation form values; **custom overrides them.
        defs = {
            'users_group_name': name,
            'user_group_description': 'DESC',
            'users_group_active': True,
            'user_group_data': {},
        }
        defs.update(custom)

        return defs

    def create_repo(self, name, **kwargs):
        """Create repository `name` (or return the existing one when
        skip_if_exists=True is passed) and return the DB object."""
        repo_group = kwargs.get('repo_group')
        if isinstance(repo_group, RepoGroup):
            # normalize: callers may pass a RepoGroup object instead of an id
            kwargs['repo_group'] = repo_group.group_id
            name = name.split(Repository.NAME_SEP)[-1]
            name = Repository.NAME_SEP.join((repo_group.group_name, name))

        if 'skip_if_exists' in kwargs:
            del kwargs['skip_if_exists']
            r = Repository.get_by_repo_name(name)
            if r:
                return r

        form_data = self._get_repo_create_params(repo_name=name, **kwargs)
        cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
        RepoModel().create(form_data, cur_user)
        Session().commit()
        repo = Repository.get_by_repo_name(name)
        assert repo
        return repo

    def create_fork(self, repo_to_fork, fork_name, **kwargs):
        """Fork the repo named `repo_to_fork` into `fork_name`; return it."""
        repo_to_fork = Repository.get_by_repo_name(repo_to_fork)

        form_data = self._get_repo_create_params(repo_name=fork_name,
            fork_parent_id=repo_to_fork.repo_id,
            repo_type=repo_to_fork.repo_type,
            **kwargs)
        #TODO: fix it !!
        form_data['description'] = form_data['repo_description']
        form_data['private'] = form_data['repo_private']
        form_data['landing_rev'] = form_data['repo_landing_rev']

        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
        RepoModel().create_fork(form_data, cur_user=owner)
        Session().commit()
        r = Repository.get_by_repo_name(fork_name)
        assert r
        return r

    def destroy_repo(self, repo_name, **kwargs):
        # pull_requests='delete' removes attached PRs so deletion cannot fail
        # with AttachedPullRequestsError in test teardown
        RepoModel().delete(repo_name, pull_requests='delete', **kwargs)
        Session().commit()

    def destroy_repo_on_filesystem(self, repo_name):
        # remove only the backing VCS directory; the DB record is untouched
        rm_path = os.path.join(RepoModel().repos_path, repo_name)
        if os.path.isdir(rm_path):
            shutil.rmtree(rm_path)

    def create_repo_group(self, name, **kwargs):
        """Create repo group `name` (or return existing with skip_if_exists)."""
        if 'skip_if_exists' in kwargs:
            del kwargs['skip_if_exists']
            gr = RepoGroup.get_by_group_name(group_name=name)
            if gr:
                return gr
        form_data = self._get_group_create_params(group_name=name, **kwargs)
        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
        gr = RepoGroupModel().create(
            group_name=form_data['group_name'],
            group_description=form_data['group_name'],
            owner=owner)
        Session().commit()
        gr = RepoGroup.get_by_group_name(gr.group_name)
        return gr

    def destroy_repo_group(self, repogroupid):
        RepoGroupModel().delete(repogroupid)
        Session().commit()

    def create_user(self, name, **kwargs):
        """Create user `name` plus an auth token (or return existing with
        skip_if_exists)."""
        if 'skip_if_exists' in kwargs:
            del kwargs['skip_if_exists']
            user = User.get_by_username(name)
            if user:
                return user
        form_data = self._get_user_create_params(name, **kwargs)
        user = UserModel().create(form_data)

        # create token for user
        AuthTokenModel().create(
            user=user, description=u'TEST_USER_TOKEN')

        Session().commit()
        user = User.get_by_username(user.username)
        return user

    def destroy_user(self, userid):
        UserModel().delete(userid)
        Session().commit()

    def create_additional_user_email(self, user, email):
        # NOTE: only adds to the session; caller must Session().commit()
        uem = UserEmailMap()
        uem.user = user
        uem.email = email
        Session().add(uem)
        return uem

    def destroy_users(self, userid_iter):
        """Delete every existing user named in `userid_iter`."""
        for user_id in userid_iter:
            if User.get_by_username(user_id):
                UserModel().delete(user_id)
                Session().commit()

    def create_user_group(self, name, **kwargs):
        """Create user group `name` (or return existing with skip_if_exists)."""
        if 'skip_if_exists' in kwargs:
            del kwargs['skip_if_exists']
            gr = UserGroup.get_by_group_name(group_name=name)
            if gr:
                return gr
        # map active flag to the real attribute. For API consistency of fixtures
        if 'active' in kwargs:
            kwargs['users_group_active'] = kwargs['active']
            del kwargs['active']
        form_data = self._get_user_group_create_params(name, **kwargs)
        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
        user_group = UserGroupModel().create(
            name=form_data['users_group_name'],
            description=form_data['user_group_description'],
            owner=owner, active=form_data['users_group_active'],
            group_data=form_data['user_group_data'])
        Session().commit()
        user_group = UserGroup.get_by_group_name(user_group.users_group_name)
        return user_group

    def destroy_user_group(self, usergroupid):
        UserGroupModel().delete(user_group=usergroupid, force=True)
        Session().commit()

    def create_gist(self, **kwargs):
        """Create a public, never-expiring one-file gist; kwargs override
        the defaults below."""
        form_data = {
            'description': 'new-gist',
            'owner': TEST_USER_ADMIN_LOGIN,
            'gist_type': GistModel.cls.GIST_PUBLIC,
            'lifetime': -1,
            'acl_level': Gist.ACL_LEVEL_PUBLIC,
            'gist_mapping': {'filename1.txt': {'content': 'hello world'},}
        }
        form_data.update(kwargs)
        gist = GistModel().create(
            description=form_data['description'], owner=form_data['owner'],
            gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'],
            lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level']
        )
        Session().commit()
        return gist

    def destroy_gists(self, gistid=None):
        """Delete all gists, or only the one whose access id equals `gistid`."""
        for g in GistModel.cls.get_all():
            if gistid:
                if gistid == g.gist_access_id:
                    GistModel().delete(g)
            else:
                GistModel().delete(g)
        Session().commit()

    def load_resource(self, resource_name, strip=False):
        """Return the text content of a file from the FIXTURES directory,
        optionally stripped of surrounding whitespace."""
        with open(os.path.join(FIXTURES, resource_name)) as f:
            source = f.read()
            if strip:
                source = source.strip()

        return source
General Comments 0
You need to be logged in to leave comments.
Login now