@@ -1,248 +1,263 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2011-2018 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import logging

 from pyramid.view import view_config
 from pyramid.httpexceptions import HTTPFound

 from rhodecode.apps._base import RepoAppView
 from rhodecode.lib import helpers as h
 from rhodecode.lib import audit_logger
 from rhodecode.lib.auth import (
     LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired,
     HasRepoPermissionAny)
-from rhodecode.lib.exceptions import AttachedForksError
+from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
 from rhodecode.lib.utils2 import safe_int
 from rhodecode.lib.vcs import RepositoryError
 from rhodecode.model.db import Session, UserFollowing, User, Repository
 from rhodecode.model.repo import RepoModel
 from rhodecode.model.scm import ScmModel

 log = logging.getLogger(__name__)


 class RepoSettingsView(RepoAppView):

     def load_default_context(self):
         c = self._get_local_tmpl_context()
         return c

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.admin')
     @view_config(
         route_name='edit_repo_advanced', request_method='GET',
         renderer='rhodecode:templates/admin/repos/repo_edit.mako')
     def edit_advanced(self):
         c = self.load_default_context()
         c.active = 'advanced'

         c.default_user_id = User.get_default_user().user_id
         c.in_public_journal = UserFollowing.query() \
             .filter(UserFollowing.user_id == c.default_user_id) \
             .filter(UserFollowing.follows_repository == self.db_repo).scalar()

         c.has_origin_repo_read_perm = False
         if self.db_repo.fork:
             c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
                 'repository.write', 'repository.read', 'repository.admin')(
                 self.db_repo.fork.repo_name, 'repo set as fork page')

         return self._get_template_context(c)

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.admin')
     @CSRFRequired()
     @view_config(
         route_name='edit_repo_advanced_delete', request_method='POST',
         renderer='rhodecode:templates/admin/repos/repo_edit.mako')
     def edit_advanced_delete(self):
         """
         Deletes the repository, or shows warnings if deletion is not possible
         because of attached forks or other errors.
         """
         _ = self.request.translate
         handle_forks = self.request.POST.get('forks', None)
+        if handle_forks == 'detach_forks':
+            handle_forks = 'detach'
+        elif handle_forks == 'delete_forks':
+            handle_forks = 'delete'

         try:
+            old_data = self.db_repo.get_api_data()
+            RepoModel().delete(self.db_repo, forks=handle_forks)
+
             _forks = self.db_repo.forks.count()
             if _forks and handle_forks:
                 if handle_forks == 'detach_forks':
-                    handle_forks = 'detach'
                     h.flash(_('Detached %s forks') % _forks, category='success')
                 elif handle_forks == 'delete_forks':
-                    handle_forks = 'delete'
                     h.flash(_('Deleted %s forks') % _forks, category='success')

-            old_data = self.db_repo.get_api_data()
-            RepoModel().delete(self.db_repo, forks=handle_forks)
-
-            repo = audit_logger.RepoWrap(repo_id=None,
-                                         repo_name=self.db_repo.repo_name)
+            repo = audit_logger.RepoWrap(repo_id=None, repo_name=self.db_repo.repo_name)
             audit_logger.store_web(
                 'repo.delete', action_data={'old_data': old_data},
                 user=self._rhodecode_user, repo=repo)

             ScmModel().mark_for_invalidation(self.db_repo_name, delete=True)
             h.flash(
                 _('Deleted repository `%s`') % self.db_repo_name,
                 category='success')
             Session().commit()
         except AttachedForksError:
             repo_advanced_url = h.route_path(
                 'edit_repo_advanced', repo_name=self.db_repo_name,
                 _anchor='advanced-delete')
             delete_anchor = h.link_to(_('detach or delete'), repo_advanced_url)
             h.flash(_('Cannot delete `{repo}` it still contains attached forks. '
                       'Try using {delete_or_detach} option.')
                     .format(repo=self.db_repo_name, delete_or_detach=delete_anchor),
                     category='warning')

             # redirect to advanced for forks handle action ?
             raise HTTPFound(repo_advanced_url)

+        except AttachedPullRequestsError:
+            repo_advanced_url = h.route_path(
+                'edit_repo_advanced', repo_name=self.db_repo_name,
+                _anchor='advanced-delete')
+            attached_prs = len(self.db_repo.pull_requests_source +
+                               self.db_repo.pull_requests_target)
+            h.flash(
+                _('Cannot delete `{repo}` it still contains {num} attached pull requests. '
+                  'Consider archiving the repository instead.').format(
+                    repo=self.db_repo_name, num=attached_prs), category='warning')
+
+            # redirect to advanced for forks handle action ?
+            raise HTTPFound(repo_advanced_url)
+
         except Exception:
             log.exception("Exception during deletion of repository")
             h.flash(_('An error occurred during deletion of `%s`')
                     % self.db_repo_name, category='error')
             # redirect to advanced for more deletion options
             raise HTTPFound(
                 h.route_path('edit_repo_advanced', repo_name=self.db_repo_name,
                              _anchor='advanced-delete'))

         raise HTTPFound(h.route_path('home'))

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.admin')
     @CSRFRequired()
     @view_config(
         route_name='edit_repo_advanced_journal', request_method='POST',
         renderer='rhodecode:templates/admin/repos/repo_edit.mako')
     def edit_advanced_journal(self):
         """
         Set's this repository to be visible in public journal,
         in other words making default user to follow this repo
         """
         _ = self.request.translate

         try:
             user_id = User.get_default_user().user_id
             ScmModel().toggle_following_repo(self.db_repo.repo_id, user_id)
             h.flash(_('Updated repository visibility in public journal'),
                     category='success')
             Session().commit()
         except Exception:
             h.flash(_('An error occurred during setting this '
                       'repository in public journal'),
                     category='error')

         raise HTTPFound(
             h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.admin')
     @CSRFRequired()
     @view_config(
         route_name='edit_repo_advanced_fork', request_method='POST',
         renderer='rhodecode:templates/admin/repos/repo_edit.mako')
     def edit_advanced_fork(self):
         """
         Mark given repository as a fork of another
         """
         _ = self.request.translate

         new_fork_id = safe_int(self.request.POST.get('id_fork_of'))

         # valid repo, re-check permissions
         if new_fork_id:
             repo = Repository.get(new_fork_id)
             # ensure we have at least read access to the repo we mark
             perm_check = HasRepoPermissionAny(
                 'repository.read', 'repository.write', 'repository.admin')

             if repo and perm_check(repo_name=repo.repo_name):
                 new_fork_id = repo.repo_id
             else:
                 new_fork_id = None

         try:
             repo = ScmModel().mark_as_fork(
                 self.db_repo_name, new_fork_id, self._rhodecode_user.user_id)
             fork = repo.fork.repo_name if repo.fork else _('Nothing')
             Session().commit()
             h.flash(
                 _('Marked repo %s as fork of %s') % (self.db_repo_name, fork),
                 category='success')
         except RepositoryError as e:
             log.exception("Repository Error occurred")
             h.flash(str(e), category='error')
         except Exception:
             log.exception("Exception while editing fork")
             h.flash(_('An error occurred during this operation'),
                     category='error')

         raise HTTPFound(
             h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.admin')
     @CSRFRequired()
     @view_config(
         route_name='edit_repo_advanced_locking', request_method='POST',
         renderer='rhodecode:templates/admin/repos/repo_edit.mako')
     def edit_advanced_locking(self):
         """
         Toggle locking of repository
         """
         _ = self.request.translate
         set_lock = self.request.POST.get('set_lock')
         set_unlock = self.request.POST.get('set_unlock')

         try:
             if set_lock:
                 Repository.lock(self.db_repo, self._rhodecode_user.user_id,
                                 lock_reason=Repository.LOCK_WEB)
                 h.flash(_('Locked repository'), category='success')
             elif set_unlock:
                 Repository.unlock(self.db_repo)
                 h.flash(_('Unlocked repository'), category='success')
         except Exception as e:
             log.exception("Exception during unlocking")
             h.flash(_('An error occurred during unlocking'), category='error')

         raise HTTPFound(
             h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))

     @LoginRequired()
     @HasRepoPermissionAnyDecorator('repository.admin')
     @view_config(
         route_name='edit_repo_advanced_hooks', request_method='GET',
         renderer='rhodecode:templates/admin/repos/repo_edit.mako')
     def edit_advanced_install_hooks(self):
         """
         Install Hooks for repository
         """
         _ = self.request.translate
         self.load_default_context()
         self.rhodecode_vcs_repo.install_hooks(force=True)
         h.flash(_('installed updated hooks into this repository'),
                 category='success')
         raise HTTPFound(
             h.route_path('edit_repo_advanced', repo_name=self.db_repo_name))
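The new `AttachedPullRequestsError` branch above reports how many pull requests block the deletion. Below is a minimal sketch of that counting logic pulled out of the handler for clarity; the `pull_requests_source` / `pull_requests_target` relationship names come from the diff, while the helper itself is purely illustrative.

```python
def count_attached_pull_requests(repo):
    # A repository blocks deletion when it is the source *or* the target of
    # any pull request; both list-like relationships are concatenated and
    # counted, matching the expression used in the except-branch above.
    return len(repo.pull_requests_source + repo.pull_requests_target)
```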
@@ -1,155 +1,159 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2010-2018 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 """
 Set of custom exceptions used in RhodeCode
 """

 from webob.exc import HTTPClientError
 from pyramid.httpexceptions import HTTPBadGateway


 class LdapUsernameError(Exception):
     pass


 class LdapPasswordError(Exception):
     pass


 class LdapConnectionError(Exception):
     pass


 class LdapImportError(Exception):
     pass


 class DefaultUserException(Exception):
     pass


 class UserOwnsReposException(Exception):
     pass


 class UserOwnsRepoGroupsException(Exception):
     pass


 class UserOwnsUserGroupsException(Exception):
     pass


 class UserGroupAssignedException(Exception):
     pass


 class StatusChangeOnClosedPullRequestError(Exception):
     pass


 class AttachedForksError(Exception):
     pass


+class AttachedPullRequestsError(Exception):
+    pass
+
+
 class RepoGroupAssignmentError(Exception):
     pass


 class NonRelativePathError(Exception):
     pass


 class HTTPRequirementError(HTTPClientError):
     title = explanation = 'Repository Requirement Missing'
     reason = None

     def __init__(self, message, *args, **kwargs):
         self.title = self.explanation = message
         super(HTTPRequirementError, self).__init__(*args, **kwargs)
         self.args = (message, )


 class HTTPLockedRC(HTTPClientError):
     """
     Special Exception For locked Repos in RhodeCode, the return code can
     be overwritten by _code keyword argument passed into constructors
     """
     code = 423
     title = explanation = 'Repository Locked'
     reason = None

     def __init__(self, message, *args, **kwargs):
         from rhodecode import CONFIG
         from rhodecode.lib.utils2 import safe_int
         _code = CONFIG.get('lock_ret_code')
         self.code = safe_int(_code, self.code)
         self.title = self.explanation = message
         super(HTTPLockedRC, self).__init__(*args, **kwargs)
         self.args = (message, )


 class HTTPBranchProtected(HTTPClientError):
     """
     Special Exception For Indicating that branch is protected in RhodeCode, the
     return code can be overwritten by _code keyword argument passed into constructors
     """
     code = 403
     title = explanation = 'Branch Protected'
     reason = None

     def __init__(self, message, *args, **kwargs):
         self.title = self.explanation = message
         super(HTTPBranchProtected, self).__init__(*args, **kwargs)
         self.args = (message, )


 class IMCCommitError(Exception):
     pass


 class UserCreationError(Exception):
     pass


 class NotAllowedToCreateUserError(Exception):
     pass


 class RepositoryCreationError(Exception):
     pass


 class VCSServerUnavailable(HTTPBadGateway):
     """ HTTP Exception class for VCS Server errors """
     code = 502
     title = 'VCS Server Error'
     causes = [
         'VCS Server is not running',
         'Incorrect vcs.server=host:port',
         'Incorrect vcs.server.protocol',
     ]

     def __init__(self, message=''):
         self.explanation = 'Could not connect to VCS Server'
         if message:
             self.explanation += ': ' + message
         super(VCSServerUnavailable, self).__init__()
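With the new exception class in place, callers of the model layer can tell the two "repository still has attachments" refusals apart. A hedged sketch of that pattern follows; the message strings and the `try_delete` helper are illustrative, not part of the diff.

```python
from rhodecode.lib.exceptions import (
    AttachedForksError, AttachedPullRequestsError)


def try_delete(repo_model, repo):
    # Mirrors how the advanced-settings view reacts: forks can be detached or
    # deleted, while attached pull requests suggest archiving instead.
    try:
        repo_model.delete(repo)
    except AttachedForksError:
        return 'refused: repository still has attached forks'
    except AttachedPullRequestsError:
        return 'refused: repository still has attached pull requests'
    return 'deleted'
```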
@@ -1,1047 +1,1053 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2010-2018 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import os
 import re
 import shutil
 import time
 import logging
 import traceback
 import datetime

 from pyramid.threadlocal import get_current_request
 from zope.cachedescriptors.property import Lazy as LazyProperty

 from rhodecode import events
 from rhodecode.lib.auth import HasUserGroupPermissionAny
 from rhodecode.lib.caching_query import FromCache
-from rhodecode.lib.exceptions import AttachedForksError
+from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
 from rhodecode.lib.hooks_base import log_delete_repository
 from rhodecode.lib.user_log_filter import user_log_filter
 from rhodecode.lib.utils import make_db_config
 from rhodecode.lib.utils2 import (
     safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
     get_current_rhodecode_user, safe_int, datetime_to_time,
     action_logger_generic)
 from rhodecode.lib.vcs.backends import get_backend
 from rhodecode.model import BaseModel
 from rhodecode.model.db import (
     _hash_key, joinedload, or_, Repository, UserRepoToPerm, UserGroupRepoToPerm,
     UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
     Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)

 from rhodecode.model.settings import VcsSettingsModel


 log = logging.getLogger(__name__)


 class RepoModel(BaseModel):

     cls = Repository

     def _get_user_group(self, users_group):
         return self._get_instance(UserGroup, users_group,
                                   callback=UserGroup.get_by_group_name)

     def _get_repo_group(self, repo_group):
         return self._get_instance(RepoGroup, repo_group,
                                   callback=RepoGroup.get_by_group_name)

     def _create_default_perms(self, repository, private):
         # create default permission
         default = 'repository.read'
         def_user = User.get_default_user()
         for p in def_user.user_perms:
             if p.permission.permission_name.startswith('repository.'):
                 default = p.permission.permission_name
                 break

         default_perm = 'repository.none' if private else default

         repo_to_perm = UserRepoToPerm()
         repo_to_perm.permission = Permission.get_by_key(default_perm)

         repo_to_perm.repository = repository
         repo_to_perm.user_id = def_user.user_id

         return repo_to_perm

     @LazyProperty
     def repos_path(self):
         """
         Gets the repositories root path from database
         """
         settings_model = VcsSettingsModel(sa=self.sa)
         return settings_model.get_repos_location()

     def get(self, repo_id):
         repo = self.sa.query(Repository) \
             .filter(Repository.repo_id == repo_id)

         return repo.scalar()

     def get_repo(self, repository):
         return self._get_repo(repository)

     def get_by_repo_name(self, repo_name, cache=False):
         repo = self.sa.query(Repository) \
             .filter(Repository.repo_name == repo_name)

         if cache:
             name_key = _hash_key(repo_name)
             repo = repo.options(
                 FromCache("sql_cache_short", "get_repo_%s" % name_key))
         return repo.scalar()

     def _extract_id_from_repo_name(self, repo_name):
         if repo_name.startswith('/'):
             repo_name = repo_name.lstrip('/')
         by_id_match = re.match(r'^_(\d{1,})', repo_name)
         if by_id_match:
             return by_id_match.groups()[0]

     def get_repo_by_id(self, repo_name):
         """
         Extracts repo_name by id from special urls.
         Example url is _11/repo_name

         :param repo_name:
         :return: repo object if matched else None
         """

         try:
             _repo_id = self._extract_id_from_repo_name(repo_name)
             if _repo_id:
                 return self.get(_repo_id)
         except Exception:
             log.exception('Failed to extract repo_name from URL')

         return None

     def get_repos_for_root(self, root, traverse=False):
         if traverse:
             like_expression = u'{}%'.format(safe_unicode(root))
             repos = Repository.query().filter(
                 Repository.repo_name.like(like_expression)).all()
         else:
             if root and not isinstance(root, RepoGroup):
                 raise ValueError(
                     'Root must be an instance '
                     'of RepoGroup, got:{} instead'.format(type(root)))
             repos = Repository.query().filter(Repository.group == root).all()
         return repos

     def get_url(self, repo, request=None, permalink=False):
         if not request:
             request = get_current_request()

         if not request:
             return

         if permalink:
             return request.route_url(
                 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
         else:
             return request.route_url(
                 'repo_summary', repo_name=safe_str(repo.repo_name))

     def get_commit_url(self, repo, commit_id, request=None, permalink=False):
         if not request:
             request = get_current_request()

         if not request:
             return

         if permalink:
             return request.route_url(
                 'repo_commit', repo_name=safe_str(repo.repo_id),
                 commit_id=commit_id)

         else:
             return request.route_url(
                 'repo_commit', repo_name=safe_str(repo.repo_name),
                 commit_id=commit_id)

     def get_repo_log(self, repo, filter_term):
         repo_log = UserLog.query()\
             .filter(or_(UserLog.repository_id == repo.repo_id,
                         UserLog.repository_name == repo.repo_name))\
             .options(joinedload(UserLog.user))\
             .options(joinedload(UserLog.repository))\
             .order_by(UserLog.action_date.desc())

         repo_log = user_log_filter(repo_log, filter_term)
         return repo_log

     @classmethod
     def update_repoinfo(cls, repositories=None):
         if not repositories:
             repositories = Repository.getAll()
         for repo in repositories:
             repo.update_commit_cache()

     def get_repos_as_dict(self, repo_list=None, admin=False,
                           super_user_actions=False):
         _render = get_current_request().get_partial_renderer(
             'rhodecode:templates/data_table/_dt_elements.mako')
         c = _render.get_call_context()

         def quick_menu(repo_name):
             return _render('quick_menu', repo_name)

         def repo_lnk(name, rtype, rstate, private, fork_of):
             return _render('repo_name', name, rtype, rstate, private, fork_of,
                            short_name=not admin, admin=False)

         def last_change(last_change):
             if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
                 last_change = last_change + datetime.timedelta(seconds=
                     (datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
             return _render("last_change", last_change)

         def rss_lnk(repo_name):
             return _render("rss", repo_name)

         def atom_lnk(repo_name):
             return _render("atom", repo_name)

         def last_rev(repo_name, cs_cache):
             return _render('revision', repo_name, cs_cache.get('revision'),
                            cs_cache.get('raw_id'), cs_cache.get('author'),
                            cs_cache.get('message'), cs_cache.get('date'))

         def desc(desc):
             return _render('repo_desc', desc, c.visual.stylify_metatags)

         def state(repo_state):
             return _render("repo_state", repo_state)

         def repo_actions(repo_name):
             return _render('repo_actions', repo_name, super_user_actions)

         def user_profile(username):
             return _render('user_profile', username)

         repos_data = []
         for repo in repo_list:
             cs_cache = repo.changeset_cache
             row = {
                 "menu": quick_menu(repo.repo_name),

                 "name": repo_lnk(repo.repo_name, repo.repo_type,
                                  repo.repo_state, repo.private, repo.fork),
                 "name_raw": repo.repo_name.lower(),

                 "last_change": last_change(repo.last_db_change),
                 "last_change_raw": datetime_to_time(repo.last_db_change),

                 "last_changeset": last_rev(repo.repo_name, cs_cache),
                 "last_changeset_raw": cs_cache.get('revision'),

                 "desc": desc(repo.description_safe),
                 "owner": user_profile(repo.user.username),

                 "state": state(repo.repo_state),
                 "rss": rss_lnk(repo.repo_name),

                 "atom": atom_lnk(repo.repo_name),
             }
             if admin:
                 row.update({
                     "action": repo_actions(repo.repo_name),
                 })
             repos_data.append(row)

         return repos_data

     def _get_defaults(self, repo_name):
         """
         Gets information about repository, and returns a dict for
         usage in forms

         :param repo_name:
         """

         repo_info = Repository.get_by_repo_name(repo_name)

         if repo_info is None:
             return None

         defaults = repo_info.get_dict()
         defaults['repo_name'] = repo_info.just_name

         groups = repo_info.groups_with_parents
         parent_group = groups[-1] if groups else None

         # we use -1 as this is how in HTML, we mark an empty group
         defaults['repo_group'] = getattr(parent_group, 'group_id', -1)

         keys_to_process = (
             {'k': 'repo_type', 'strip': False},
             {'k': 'repo_enable_downloads', 'strip': True},
             {'k': 'repo_description', 'strip': True},
             {'k': 'repo_enable_locking', 'strip': True},
             {'k': 'repo_landing_rev', 'strip': True},
             {'k': 'clone_uri', 'strip': False},
             {'k': 'push_uri', 'strip': False},
             {'k': 'repo_private', 'strip': True},
             {'k': 'repo_enable_statistics', 'strip': True}
         )

         for item in keys_to_process:
             attr = item['k']
             if item['strip']:
                 attr = remove_prefix(item['k'], 'repo_')

             val = defaults[attr]
             if item['k'] == 'repo_landing_rev':
                 val = ':'.join(defaults[attr])
             defaults[item['k']] = val
             if item['k'] == 'clone_uri':
                 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
             if item['k'] == 'push_uri':
                 defaults['push_uri_hidden'] = repo_info.push_uri_hidden

         # fill owner
         if repo_info.user:
             defaults.update({'user': repo_info.user.username})
         else:
             replacement_user = User.get_first_super_admin().username
             defaults.update({'user': replacement_user})

         return defaults

     def update(self, repo, **kwargs):
         try:
             cur_repo = self._get_repo(repo)
             source_repo_name = cur_repo.repo_name
             if 'user' in kwargs:
                 cur_repo.user = User.get_by_username(kwargs['user'])

             if 'repo_group' in kwargs:
                 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
             log.debug('Updating repo %s with params:%s', cur_repo, kwargs)

             update_keys = [
                 (1, 'repo_description'),
                 (1, 'repo_landing_rev'),
                 (1, 'repo_private'),
                 (1, 'repo_enable_downloads'),
                 (1, 'repo_enable_locking'),
                 (1, 'repo_enable_statistics'),
                 (0, 'clone_uri'),
                 (0, 'push_uri'),
                 (0, 'fork_id')
             ]
             for strip, k in update_keys:
                 if k in kwargs:
                     val = kwargs[k]
                     if strip:
                         k = remove_prefix(k, 'repo_')

                     setattr(cur_repo, k, val)

             new_name = cur_repo.get_new_name(kwargs['repo_name'])
             cur_repo.repo_name = new_name

             # if private flag is set, reset default permission to NONE
             if kwargs.get('repo_private'):
                 EMPTY_PERM = 'repository.none'
                 RepoModel().grant_user_permission(
                     repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
                 )

             # handle extra fields
             for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
                                 kwargs):
                 k = RepositoryField.un_prefix_key(field)
                 ex_field = RepositoryField.get_by_key_name(
                     key=k, repo=cur_repo)
                 if ex_field:
                     ex_field.field_value = kwargs[field]
                     self.sa.add(ex_field)
             cur_repo.updated_on = datetime.datetime.now()
             self.sa.add(cur_repo)

             if source_repo_name != new_name:
                 # rename repository
                 self._rename_filesystem_repo(
                     old=source_repo_name, new=new_name)

             return cur_repo
390 | except Exception: |
|
390 | except Exception: | |
391 | log.error(traceback.format_exc()) |
|
391 | log.error(traceback.format_exc()) | |
392 | raise |
|
392 | raise | |
393 |
|
393 | |||
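Reviewer note: a minimal sketch of how update() above is typically driven. The kwargs normally come from the validated settings form; the key names mirror the update_keys list in the method, and the repository, owner and values below are placeholder assumptions.

    # illustrative only -- assumes a configured RhodeCode environment/session
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.db import Session

    model = RepoModel()
    model.update('my-group/my-repo', **{
        'repo_name': 'my-repo',            # required; combined with the group path via get_new_name()
        'repo_description': 'updated description',
        'repo_private': True,              # also resets the default user permission to repository.none
        'repo_landing_rev': 'rev:tip',
        'user': 'admin',                   # new owner username
    })
    Session().commit()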
394 | def _create_repo(self, repo_name, repo_type, description, owner, |
|
394 | def _create_repo(self, repo_name, repo_type, description, owner, | |
395 | private=False, clone_uri=None, repo_group=None, |
|
395 | private=False, clone_uri=None, repo_group=None, | |
396 | landing_rev='rev:tip', fork_of=None, |
|
396 | landing_rev='rev:tip', fork_of=None, | |
397 | copy_fork_permissions=False, enable_statistics=False, |
|
397 | copy_fork_permissions=False, enable_statistics=False, | |
398 | enable_locking=False, enable_downloads=False, |
|
398 | enable_locking=False, enable_downloads=False, | |
399 | copy_group_permissions=False, |
|
399 | copy_group_permissions=False, | |
400 | state=Repository.STATE_PENDING): |
|
400 | state=Repository.STATE_PENDING): | |
401 | """ |
|
401 | """ | |
402 | Create a repository inside the database with PENDING state; this should |
|
402 | Create a repository inside the database with PENDING state; this should | |
403 | only be executed by create(), with the exception of importing existing |
|
403 | only be executed by create(), with the exception of importing existing | |
404 | repositories |
|
404 | repositories | |
405 | """ |
|
405 | """ | |
406 | from rhodecode.model.scm import ScmModel |
|
406 | from rhodecode.model.scm import ScmModel | |
407 |
|
407 | |||
408 | owner = self._get_user(owner) |
|
408 | owner = self._get_user(owner) | |
409 | fork_of = self._get_repo(fork_of) |
|
409 | fork_of = self._get_repo(fork_of) | |
410 | repo_group = self._get_repo_group(safe_int(repo_group)) |
|
410 | repo_group = self._get_repo_group(safe_int(repo_group)) | |
411 |
|
411 | |||
412 | try: |
|
412 | try: | |
413 | repo_name = safe_unicode(repo_name) |
|
413 | repo_name = safe_unicode(repo_name) | |
414 | description = safe_unicode(description) |
|
414 | description = safe_unicode(description) | |
415 | # repo_name is just the name of the repository, |
|
415 | # repo_name is just the name of the repository, | |
416 | # while repo_name_full is the fully qualified name that combines |
|
416 | # while repo_name_full is the fully qualified name that combines | |
417 | # the group path with the name |
|
417 | # the group path with the name | |
418 | repo_name_full = repo_name |
|
418 | repo_name_full = repo_name | |
419 | repo_name = repo_name.split(Repository.NAME_SEP)[-1] |
|
419 | repo_name = repo_name.split(Repository.NAME_SEP)[-1] | |
420 |
|
420 | |||
421 | new_repo = Repository() |
|
421 | new_repo = Repository() | |
422 | new_repo.repo_state = state |
|
422 | new_repo.repo_state = state | |
423 | new_repo.enable_statistics = False |
|
423 | new_repo.enable_statistics = False | |
424 | new_repo.repo_name = repo_name_full |
|
424 | new_repo.repo_name = repo_name_full | |
425 | new_repo.repo_type = repo_type |
|
425 | new_repo.repo_type = repo_type | |
426 | new_repo.user = owner |
|
426 | new_repo.user = owner | |
427 | new_repo.group = repo_group |
|
427 | new_repo.group = repo_group | |
428 | new_repo.description = description or repo_name |
|
428 | new_repo.description = description or repo_name | |
429 | new_repo.private = private |
|
429 | new_repo.private = private | |
430 | new_repo.clone_uri = clone_uri |
|
430 | new_repo.clone_uri = clone_uri | |
431 | new_repo.landing_rev = landing_rev |
|
431 | new_repo.landing_rev = landing_rev | |
432 |
|
432 | |||
433 | new_repo.enable_statistics = enable_statistics |
|
433 | new_repo.enable_statistics = enable_statistics | |
434 | new_repo.enable_locking = enable_locking |
|
434 | new_repo.enable_locking = enable_locking | |
435 | new_repo.enable_downloads = enable_downloads |
|
435 | new_repo.enable_downloads = enable_downloads | |
436 |
|
436 | |||
437 | if repo_group: |
|
437 | if repo_group: | |
438 | new_repo.enable_locking = repo_group.enable_locking |
|
438 | new_repo.enable_locking = repo_group.enable_locking | |
439 |
|
439 | |||
440 | if fork_of: |
|
440 | if fork_of: | |
441 | parent_repo = fork_of |
|
441 | parent_repo = fork_of | |
442 | new_repo.fork = parent_repo |
|
442 | new_repo.fork = parent_repo | |
443 |
|
443 | |||
444 | events.trigger(events.RepoPreCreateEvent(new_repo)) |
|
444 | events.trigger(events.RepoPreCreateEvent(new_repo)) | |
445 |
|
445 | |||
446 | self.sa.add(new_repo) |
|
446 | self.sa.add(new_repo) | |
447 |
|
447 | |||
448 | EMPTY_PERM = 'repository.none' |
|
448 | EMPTY_PERM = 'repository.none' | |
449 | if fork_of and copy_fork_permissions: |
|
449 | if fork_of and copy_fork_permissions: | |
450 | repo = fork_of |
|
450 | repo = fork_of | |
451 | user_perms = UserRepoToPerm.query() \ |
|
451 | user_perms = UserRepoToPerm.query() \ | |
452 | .filter(UserRepoToPerm.repository == repo).all() |
|
452 | .filter(UserRepoToPerm.repository == repo).all() | |
453 | group_perms = UserGroupRepoToPerm.query() \ |
|
453 | group_perms = UserGroupRepoToPerm.query() \ | |
454 | .filter(UserGroupRepoToPerm.repository == repo).all() |
|
454 | .filter(UserGroupRepoToPerm.repository == repo).all() | |
455 |
|
455 | |||
456 | for perm in user_perms: |
|
456 | for perm in user_perms: | |
457 | UserRepoToPerm.create( |
|
457 | UserRepoToPerm.create( | |
458 | perm.user, new_repo, perm.permission) |
|
458 | perm.user, new_repo, perm.permission) | |
459 |
|
459 | |||
460 | for perm in group_perms: |
|
460 | for perm in group_perms: | |
461 | UserGroupRepoToPerm.create( |
|
461 | UserGroupRepoToPerm.create( | |
462 | perm.users_group, new_repo, perm.permission) |
|
462 | perm.users_group, new_repo, perm.permission) | |
463 | # in case we copy permissions and also set this repo to private |
|
463 | # in case we copy permissions and also set this repo to private | |
464 | # override the default user permission to make it a private |
|
464 | # override the default user permission to make it a private | |
465 | # repo |
|
465 | # repo | |
466 | if private: |
|
466 | if private: | |
467 | RepoModel(self.sa).grant_user_permission( |
|
467 | RepoModel(self.sa).grant_user_permission( | |
468 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
468 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) | |
469 |
|
469 | |||
470 | elif repo_group and copy_group_permissions: |
|
470 | elif repo_group and copy_group_permissions: | |
471 | user_perms = UserRepoGroupToPerm.query() \ |
|
471 | user_perms = UserRepoGroupToPerm.query() \ | |
472 | .filter(UserRepoGroupToPerm.group == repo_group).all() |
|
472 | .filter(UserRepoGroupToPerm.group == repo_group).all() | |
473 |
|
473 | |||
474 | group_perms = UserGroupRepoGroupToPerm.query() \ |
|
474 | group_perms = UserGroupRepoGroupToPerm.query() \ | |
475 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() |
|
475 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() | |
476 |
|
476 | |||
477 | for perm in user_perms: |
|
477 | for perm in user_perms: | |
478 | perm_name = perm.permission.permission_name.replace( |
|
478 | perm_name = perm.permission.permission_name.replace( | |
479 | 'group.', 'repository.') |
|
479 | 'group.', 'repository.') | |
480 | perm_obj = Permission.get_by_key(perm_name) |
|
480 | perm_obj = Permission.get_by_key(perm_name) | |
481 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) |
|
481 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) | |
482 |
|
482 | |||
483 | for perm in group_perms: |
|
483 | for perm in group_perms: | |
484 | perm_name = perm.permission.permission_name.replace( |
|
484 | perm_name = perm.permission.permission_name.replace( | |
485 | 'group.', 'repository.') |
|
485 | 'group.', 'repository.') | |
486 | perm_obj = Permission.get_by_key(perm_name) |
|
486 | perm_obj = Permission.get_by_key(perm_name) | |
487 | UserGroupRepoToPerm.create( |
|
487 | UserGroupRepoToPerm.create( | |
488 | perm.users_group, new_repo, perm_obj) |
|
488 | perm.users_group, new_repo, perm_obj) | |
489 |
|
489 | |||
490 | if private: |
|
490 | if private: | |
491 | RepoModel(self.sa).grant_user_permission( |
|
491 | RepoModel(self.sa).grant_user_permission( | |
492 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
492 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) | |
493 |
|
493 | |||
494 | else: |
|
494 | else: | |
495 | perm_obj = self._create_default_perms(new_repo, private) |
|
495 | perm_obj = self._create_default_perms(new_repo, private) | |
496 | self.sa.add(perm_obj) |
|
496 | self.sa.add(perm_obj) | |
497 |
|
497 | |||
498 | # now automatically start following this repository as owner |
|
498 | # now automatically start following this repository as owner | |
499 | ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, |
|
499 | ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, | |
500 | owner.user_id) |
|
500 | owner.user_id) | |
501 |
|
501 | |||
502 | # we need to flush here to check that the database won't throw |
|
502 | # we need to flush here to check that the database won't throw | |
503 | # any exceptions; filesystem dirs are created at the very end |
|
503 | # any exceptions; filesystem dirs are created at the very end | |
504 | self.sa.flush() |
|
504 | self.sa.flush() | |
505 | events.trigger(events.RepoCreateEvent(new_repo)) |
|
505 | events.trigger(events.RepoCreateEvent(new_repo)) | |
506 | return new_repo |
|
506 | return new_repo | |
507 |
|
507 | |||
508 | except Exception: |
|
508 | except Exception: | |
509 | log.error(traceback.format_exc()) |
|
509 | log.error(traceback.format_exc()) | |
510 | raise |
|
510 | raise | |
511 |
|
511 | |||
512 | def create(self, form_data, cur_user): |
|
512 | def create(self, form_data, cur_user): | |
513 | """ |
|
513 | """ | |
514 | Create repository using celery tasks |
|
514 | Create repository using celery tasks | |
515 |
|
515 | |||
516 | :param form_data: |
|
516 | :param form_data: | |
517 | :param cur_user: |
|
517 | :param cur_user: | |
518 | """ |
|
518 | """ | |
519 | from rhodecode.lib.celerylib import tasks, run_task |
|
519 | from rhodecode.lib.celerylib import tasks, run_task | |
520 | return run_task(tasks.create_repo, form_data, cur_user) |
|
520 | return run_task(tasks.create_repo, form_data, cur_user) | |
521 |
|
521 | |||
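A hedged sketch of kicking off repository creation through the celery-backed create() above. The form_data keys are assumptions modelled on the _create_repo() arguments; the exact fields the task consumes may differ.

    # illustrative only -- field names assumed from the _create_repo() signature
    from rhodecode.model.repo import RepoModel

    form_data = {
        'repo_name': 'my-repo',
        'repo_type': 'git',
        'repo_description': 'demo repository',
        'repo_private': False,
        'repo_landing_rev': 'rev:tip',
        'repo_group': None,        # or a repository group id
        'clone_uri': None,
    }
    # run_task() returns the task result (executed inline when celery is disabled)
    RepoModel().create(form_data, cur_user='admin')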
522 | def update_permissions(self, repo, perm_additions=None, perm_updates=None, |
|
522 | def update_permissions(self, repo, perm_additions=None, perm_updates=None, | |
523 | perm_deletions=None, check_perms=True, |
|
523 | perm_deletions=None, check_perms=True, | |
524 | cur_user=None): |
|
524 | cur_user=None): | |
525 | if not perm_additions: |
|
525 | if not perm_additions: | |
526 | perm_additions = [] |
|
526 | perm_additions = [] | |
527 | if not perm_updates: |
|
527 | if not perm_updates: | |
528 | perm_updates = [] |
|
528 | perm_updates = [] | |
529 | if not perm_deletions: |
|
529 | if not perm_deletions: | |
530 | perm_deletions = [] |
|
530 | perm_deletions = [] | |
531 |
|
531 | |||
532 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') |
|
532 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') | |
533 |
|
533 | |||
534 | changes = { |
|
534 | changes = { | |
535 | 'added': [], |
|
535 | 'added': [], | |
536 | 'updated': [], |
|
536 | 'updated': [], | |
537 | 'deleted': [] |
|
537 | 'deleted': [] | |
538 | } |
|
538 | } | |
539 | # update permissions |
|
539 | # update permissions | |
540 | for member_id, perm, member_type in perm_updates: |
|
540 | for member_id, perm, member_type in perm_updates: | |
541 | member_id = int(member_id) |
|
541 | member_id = int(member_id) | |
542 | if member_type == 'user': |
|
542 | if member_type == 'user': | |
543 | member_name = User.get(member_id).username |
|
543 | member_name = User.get(member_id).username | |
544 | # this also updates the current permission if one is found |
|
544 | # this also updates the current permission if one is found | |
545 | self.grant_user_permission( |
|
545 | self.grant_user_permission( | |
546 | repo=repo, user=member_id, perm=perm) |
|
546 | repo=repo, user=member_id, perm=perm) | |
547 | elif member_type == 'user_group': |
|
547 | elif member_type == 'user_group': | |
548 | # check if we have permissions to alter this usergroup |
|
548 | # check if we have permissions to alter this usergroup | |
549 | member_name = UserGroup.get(member_id).users_group_name |
|
549 | member_name = UserGroup.get(member_id).users_group_name | |
550 | if not check_perms or HasUserGroupPermissionAny( |
|
550 | if not check_perms or HasUserGroupPermissionAny( | |
551 | *req_perms)(member_name, user=cur_user): |
|
551 | *req_perms)(member_name, user=cur_user): | |
552 | self.grant_user_group_permission( |
|
552 | self.grant_user_group_permission( | |
553 | repo=repo, group_name=member_id, perm=perm) |
|
553 | repo=repo, group_name=member_id, perm=perm) | |
554 | else: |
|
554 | else: | |
555 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
555 | raise ValueError("member_type must be 'user' or 'user_group' " | |
556 | "got {} instead".format(member_type)) |
|
556 | "got {} instead".format(member_type)) | |
557 | changes['updated'].append({'type': member_type, 'id': member_id, |
|
557 | changes['updated'].append({'type': member_type, 'id': member_id, | |
558 | 'name': member_name, 'new_perm': perm}) |
|
558 | 'name': member_name, 'new_perm': perm}) | |
559 |
|
559 | |||
560 | # set new permissions |
|
560 | # set new permissions | |
561 | for member_id, perm, member_type in perm_additions: |
|
561 | for member_id, perm, member_type in perm_additions: | |
562 | member_id = int(member_id) |
|
562 | member_id = int(member_id) | |
563 | if member_type == 'user': |
|
563 | if member_type == 'user': | |
564 | member_name = User.get(member_id).username |
|
564 | member_name = User.get(member_id).username | |
565 | self.grant_user_permission( |
|
565 | self.grant_user_permission( | |
566 | repo=repo, user=member_id, perm=perm) |
|
566 | repo=repo, user=member_id, perm=perm) | |
567 | elif member_type == 'user_group': |
|
567 | elif member_type == 'user_group': | |
568 | # check if we have permissions to alter this usergroup |
|
568 | # check if we have permissions to alter this usergroup | |
569 | member_name = UserGroup.get(member_id).users_group_name |
|
569 | member_name = UserGroup.get(member_id).users_group_name | |
570 | if not check_perms or HasUserGroupPermissionAny( |
|
570 | if not check_perms or HasUserGroupPermissionAny( | |
571 | *req_perms)(member_name, user=cur_user): |
|
571 | *req_perms)(member_name, user=cur_user): | |
572 | self.grant_user_group_permission( |
|
572 | self.grant_user_group_permission( | |
573 | repo=repo, group_name=member_id, perm=perm) |
|
573 | repo=repo, group_name=member_id, perm=perm) | |
574 | else: |
|
574 | else: | |
575 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
575 | raise ValueError("member_type must be 'user' or 'user_group' " | |
576 | "got {} instead".format(member_type)) |
|
576 | "got {} instead".format(member_type)) | |
577 |
|
577 | |||
578 | changes['added'].append({'type': member_type, 'id': member_id, |
|
578 | changes['added'].append({'type': member_type, 'id': member_id, | |
579 | 'name': member_name, 'new_perm': perm}) |
|
579 | 'name': member_name, 'new_perm': perm}) | |
580 | # delete permissions |
|
580 | # delete permissions | |
581 | for member_id, perm, member_type in perm_deletions: |
|
581 | for member_id, perm, member_type in perm_deletions: | |
582 | member_id = int(member_id) |
|
582 | member_id = int(member_id) | |
583 | if member_type == 'user': |
|
583 | if member_type == 'user': | |
584 | member_name = User.get(member_id).username |
|
584 | member_name = User.get(member_id).username | |
585 | self.revoke_user_permission(repo=repo, user=member_id) |
|
585 | self.revoke_user_permission(repo=repo, user=member_id) | |
586 | elif member_type == 'user_group': |
|
586 | elif member_type == 'user_group': | |
587 | # check if we have permissions to alter this usergroup |
|
587 | # check if we have permissions to alter this usergroup | |
588 | member_name = UserGroup.get(member_id).users_group_name |
|
588 | member_name = UserGroup.get(member_id).users_group_name | |
589 | if not check_perms or HasUserGroupPermissionAny( |
|
589 | if not check_perms or HasUserGroupPermissionAny( | |
590 | *req_perms)(member_name, user=cur_user): |
|
590 | *req_perms)(member_name, user=cur_user): | |
591 | self.revoke_user_group_permission( |
|
591 | self.revoke_user_group_permission( | |
592 | repo=repo, group_name=member_id) |
|
592 | repo=repo, group_name=member_id) | |
593 | else: |
|
593 | else: | |
594 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
594 | raise ValueError("member_type must be 'user' or 'user_group' " | |
595 | "got {} instead".format(member_type)) |
|
595 | "got {} instead".format(member_type)) | |
596 |
|
596 | |||
597 | changes['deleted'].append({'type': member_type, 'id': member_id, |
|
597 | changes['deleted'].append({'type': member_type, 'id': member_id, | |
598 | 'name': member_name, 'new_perm': perm}) |
|
598 | 'name': member_name, 'new_perm': perm}) | |
599 | return changes |
|
599 | return changes | |
600 |
|
600 | |||
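All three permission lists above carry (member_id, permission, member_type) tuples, with member_type being 'user' or 'user_group'. A short sketch of a call; the ids and permission names are placeholders.

    # illustrative only -- ids and names are placeholders
    changes = RepoModel().update_permissions(
        repo='my-group/my-repo',
        # member ids may arrive as strings from the form; the method coerces them with int()
        perm_additions=[('2', 'repository.read', 'user')],
        perm_updates=[('3', 'repository.write', 'user_group')],
        perm_deletions=[('4', 'repository.none', 'user')],
        check_perms=False,  # skip the usergroup permission check in this sketch
    )
    # changes == {'added': [...], 'updated': [...], 'deleted': [...]}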
601 | def create_fork(self, form_data, cur_user): |
|
601 | def create_fork(self, form_data, cur_user): | |
602 | """ |
|
602 | """ | |
603 | Simple wrapper that executes the celery task for fork creation |
|
603 | Simple wrapper that executes the celery task for fork creation | |
604 |
|
604 | |||
605 | :param form_data: |
|
605 | :param form_data: | |
606 | :param cur_user: |
|
606 | :param cur_user: | |
607 | """ |
|
607 | """ | |
608 | from rhodecode.lib.celerylib import tasks, run_task |
|
608 | from rhodecode.lib.celerylib import tasks, run_task | |
609 | return run_task(tasks.create_repo_fork, form_data, cur_user) |
|
609 | return run_task(tasks.create_repo_fork, form_data, cur_user) | |
610 |
|
610 | |||
611 | def delete(self, repo, forks=None, fs_remove=True, cur_user=None): |
|
611 | def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None): | |
612 | """ |
|
612 | """ | |
613 | Delete the given repository; the forks parameter defines what to do with |
|
613 | Delete the given repository; the forks parameter defines what to do with | |
614 | attached forks. Raises AttachedForksError if the deleted repo has attached |
|
614 | attached forks. Raises AttachedForksError if the deleted repo has attached | |
615 | forks |
|
615 | forks | |
616 |
|
616 | |||
617 | :param repo: |
|
617 | :param repo: | |
618 | :param forks: str 'delete' or 'detach' |
|
618 | :param forks: str 'delete' or 'detach' | |
619 | :param fs_remove: remove (archive) the repo from the filesystem |
|
619 | :param fs_remove: remove (archive) the repo from the filesystem | |
620 | """ |
|
620 | """ | |
621 | if not cur_user: |
|
621 | if not cur_user: | |
622 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
622 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) | |
623 | repo = self._get_repo(repo) |
|
623 | repo = self._get_repo(repo) | |
624 | if repo: |
|
624 | if repo: | |
625 | if forks == 'detach': |
|
625 | if forks == 'detach': | |
626 | for r in repo.forks: |
|
626 | for r in repo.forks: | |
627 | r.fork = None |
|
627 | r.fork = None | |
628 | self.sa.add(r) |
|
628 | self.sa.add(r) | |
629 | elif forks == 'delete': |
|
629 | elif forks == 'delete': | |
630 | for r in repo.forks: |
|
630 | for r in repo.forks: | |
631 | self.delete(r, forks='delete') |
|
631 | self.delete(r, forks='delete') | |
632 | elif [f for f in repo.forks]: |
|
632 | elif [f for f in repo.forks]: | |
633 | raise AttachedForksError() |
|
633 | raise AttachedForksError() | |
634 |
|
634 | |||
|
635 | # check for pull requests | |||
|
636 | pr_sources = repo.pull_requests_source | |||
|
637 | pr_targets = repo.pull_requests_target | |||
|
638 | if pull_requests != 'delete' and (pr_sources or pr_targets): | |||
|
639 | raise AttachedPullRequestsError() | |||
|
640 | ||||
635 | old_repo_dict = repo.get_dict() |
|
641 | old_repo_dict = repo.get_dict() | |
636 | events.trigger(events.RepoPreDeleteEvent(repo)) |
|
642 | events.trigger(events.RepoPreDeleteEvent(repo)) | |
637 | try: |
|
643 | try: | |
638 | self.sa.delete(repo) |
|
644 | self.sa.delete(repo) | |
639 | if fs_remove: |
|
645 | if fs_remove: | |
640 | self._delete_filesystem_repo(repo) |
|
646 | self._delete_filesystem_repo(repo) | |
641 | else: |
|
647 | else: | |
642 | log.debug('skipping removal from filesystem') |
|
648 | log.debug('skipping removal from filesystem') | |
643 | old_repo_dict.update({ |
|
649 | old_repo_dict.update({ | |
644 | 'deleted_by': cur_user, |
|
650 | 'deleted_by': cur_user, | |
645 | 'deleted_on': time.time(), |
|
651 | 'deleted_on': time.time(), | |
646 | }) |
|
652 | }) | |
647 | log_delete_repository(**old_repo_dict) |
|
653 | log_delete_repository(**old_repo_dict) | |
648 | events.trigger(events.RepoDeleteEvent(repo)) |
|
654 | events.trigger(events.RepoDeleteEvent(repo)) | |
649 | except Exception: |
|
655 | except Exception: | |
650 | log.error(traceback.format_exc()) |
|
656 | log.error(traceback.format_exc()) | |
651 | raise |
|
657 | raise | |
652 |
|
658 | |||
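With the new pull-request guard above, a hedged sketch of calling delete(); the repository name and the surrounding session handling are assumptions about the environment.

    # illustrative only -- assumes a configured RhodeCode environment
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.db import Session
    from rhodecode.lib.exceptions import (
        AttachedForksError, AttachedPullRequestsError)

    model = RepoModel()
    try:
        # without forks=/pull_requests= the call refuses to delete a repository
        # that still has forks attached or participates in pull requests
        model.delete('my-group/my-repo', fs_remove=True)
        Session().commit()
    except AttachedForksError:
        # retry with forks='detach' or forks='delete'
        pass
    except AttachedPullRequestsError:
        # retry with pull_requests='delete'
        pass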
653 | def grant_user_permission(self, repo, user, perm): |
|
659 | def grant_user_permission(self, repo, user, perm): | |
654 | """ |
|
660 | """ | |
655 | Grant permission for user on given repository, or update existing one |
|
661 | Grant permission for user on given repository, or update existing one | |
656 | if found |
|
662 | if found | |
657 |
|
663 | |||
658 | :param repo: Instance of Repository, repository_id, or repository name |
|
664 | :param repo: Instance of Repository, repository_id, or repository name | |
659 | :param user: Instance of User, user_id or username |
|
665 | :param user: Instance of User, user_id or username | |
660 | :param perm: Instance of Permission, or permission_name |
|
666 | :param perm: Instance of Permission, or permission_name | |
661 | """ |
|
667 | """ | |
662 | user = self._get_user(user) |
|
668 | user = self._get_user(user) | |
663 | repo = self._get_repo(repo) |
|
669 | repo = self._get_repo(repo) | |
664 | permission = self._get_perm(perm) |
|
670 | permission = self._get_perm(perm) | |
665 |
|
671 | |||
666 | # check if we have that permission already |
|
672 | # check if we have that permission already | |
667 | obj = self.sa.query(UserRepoToPerm) \ |
|
673 | obj = self.sa.query(UserRepoToPerm) \ | |
668 | .filter(UserRepoToPerm.user == user) \ |
|
674 | .filter(UserRepoToPerm.user == user) \ | |
669 | .filter(UserRepoToPerm.repository == repo) \ |
|
675 | .filter(UserRepoToPerm.repository == repo) \ | |
670 | .scalar() |
|
676 | .scalar() | |
671 | if obj is None: |
|
677 | if obj is None: | |
672 | # create new ! |
|
678 | # create new ! | |
673 | obj = UserRepoToPerm() |
|
679 | obj = UserRepoToPerm() | |
674 | obj.repository = repo |
|
680 | obj.repository = repo | |
675 | obj.user = user |
|
681 | obj.user = user | |
676 | obj.permission = permission |
|
682 | obj.permission = permission | |
677 | self.sa.add(obj) |
|
683 | self.sa.add(obj) | |
678 | log.debug('Granted perm %s to %s on %s', perm, user, repo) |
|
684 | log.debug('Granted perm %s to %s on %s', perm, user, repo) | |
679 | action_logger_generic( |
|
685 | action_logger_generic( | |
680 | 'granted permission: {} to user: {} on repo: {}'.format( |
|
686 | 'granted permission: {} to user: {} on repo: {}'.format( | |
681 | perm, user, repo), namespace='security.repo') |
|
687 | perm, user, repo), namespace='security.repo') | |
682 | return obj |
|
688 | return obj | |
683 |
|
689 | |||
684 | def revoke_user_permission(self, repo, user): |
|
690 | def revoke_user_permission(self, repo, user): | |
685 | """ |
|
691 | """ | |
686 | Revoke permission for user on given repository |
|
692 | Revoke permission for user on given repository | |
687 |
|
693 | |||
688 | :param repo: Instance of Repository, repository_id, or repository name |
|
694 | :param repo: Instance of Repository, repository_id, or repository name | |
689 | :param user: Instance of User, user_id or username |
|
695 | :param user: Instance of User, user_id or username | |
690 | """ |
|
696 | """ | |
691 |
|
697 | |||
692 | user = self._get_user(user) |
|
698 | user = self._get_user(user) | |
693 | repo = self._get_repo(repo) |
|
699 | repo = self._get_repo(repo) | |
694 |
|
700 | |||
695 | obj = self.sa.query(UserRepoToPerm) \ |
|
701 | obj = self.sa.query(UserRepoToPerm) \ | |
696 | .filter(UserRepoToPerm.repository == repo) \ |
|
702 | .filter(UserRepoToPerm.repository == repo) \ | |
697 | .filter(UserRepoToPerm.user == user) \ |
|
703 | .filter(UserRepoToPerm.user == user) \ | |
698 | .scalar() |
|
704 | .scalar() | |
699 | if obj: |
|
705 | if obj: | |
700 | self.sa.delete(obj) |
|
706 | self.sa.delete(obj) | |
701 | log.debug('Revoked perm on %s on %s', repo, user) |
|
707 | log.debug('Revoked perm on %s on %s', repo, user) | |
702 | action_logger_generic( |
|
708 | action_logger_generic( | |
703 | 'revoked permission from user: {} on repo: {}'.format( |
|
709 | 'revoked permission from user: {} on repo: {}'.format( | |
704 | user, repo), namespace='security.repo') |
|
710 | user, repo), namespace='security.repo') | |
705 |
|
711 | |||
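For completeness, a small sketch of the grant/revoke pair above; the repository, username and permission key are placeholders (permission keys follow the 'repository.*' naming used elsewhere in this file).

    # illustrative only
    model = RepoModel()
    model.grant_user_permission(repo='my-repo', user='john', perm='repository.write')
    model.revoke_user_permission(repo='my-repo', user='john')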
706 | def grant_user_group_permission(self, repo, group_name, perm): |
|
712 | def grant_user_group_permission(self, repo, group_name, perm): | |
707 | """ |
|
713 | """ | |
708 | Grant permission for user group on given repository, or update |
|
714 | Grant permission for user group on given repository, or update | |
709 | existing one if found |
|
715 | existing one if found | |
710 |
|
716 | |||
711 | :param repo: Instance of Repository, repository_id, or repository name |
|
717 | :param repo: Instance of Repository, repository_id, or repository name | |
712 | :param group_name: Instance of UserGroup, users_group_id, |
|
718 | :param group_name: Instance of UserGroup, users_group_id, | |
713 | or user group name |
|
719 | or user group name | |
714 | :param perm: Instance of Permission, or permission_name |
|
720 | :param perm: Instance of Permission, or permission_name | |
715 | """ |
|
721 | """ | |
716 | repo = self._get_repo(repo) |
|
722 | repo = self._get_repo(repo) | |
717 | group_name = self._get_user_group(group_name) |
|
723 | group_name = self._get_user_group(group_name) | |
718 | permission = self._get_perm(perm) |
|
724 | permission = self._get_perm(perm) | |
719 |
|
725 | |||
720 | # check if we have that permission already |
|
726 | # check if we have that permission already | |
721 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
727 | obj = self.sa.query(UserGroupRepoToPerm) \ | |
722 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
728 | .filter(UserGroupRepoToPerm.users_group == group_name) \ | |
723 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
729 | .filter(UserGroupRepoToPerm.repository == repo) \ | |
724 | .scalar() |
|
730 | .scalar() | |
725 |
|
731 | |||
726 | if obj is None: |
|
732 | if obj is None: | |
727 | # create new |
|
733 | # create new | |
728 | obj = UserGroupRepoToPerm() |
|
734 | obj = UserGroupRepoToPerm() | |
729 |
|
735 | |||
730 | obj.repository = repo |
|
736 | obj.repository = repo | |
731 | obj.users_group = group_name |
|
737 | obj.users_group = group_name | |
732 | obj.permission = permission |
|
738 | obj.permission = permission | |
733 | self.sa.add(obj) |
|
739 | self.sa.add(obj) | |
734 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) |
|
740 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) | |
735 | action_logger_generic( |
|
741 | action_logger_generic( | |
736 | 'granted permission: {} to usergroup: {} on repo: {}'.format( |
|
742 | 'granted permission: {} to usergroup: {} on repo: {}'.format( | |
737 | perm, group_name, repo), namespace='security.repo') |
|
743 | perm, group_name, repo), namespace='security.repo') | |
738 |
|
744 | |||
739 | return obj |
|
745 | return obj | |
740 |
|
746 | |||
741 | def revoke_user_group_permission(self, repo, group_name): |
|
747 | def revoke_user_group_permission(self, repo, group_name): | |
742 | """ |
|
748 | """ | |
743 | Revoke permission for user group on given repository |
|
749 | Revoke permission for user group on given repository | |
744 |
|
750 | |||
745 | :param repo: Instance of Repository, repository_id, or repository name |
|
751 | :param repo: Instance of Repository, repository_id, or repository name | |
746 | :param group_name: Instance of UserGroup, users_group_id, |
|
752 | :param group_name: Instance of UserGroup, users_group_id, | |
747 | or user group name |
|
753 | or user group name | |
748 | """ |
|
754 | """ | |
749 | repo = self._get_repo(repo) |
|
755 | repo = self._get_repo(repo) | |
750 | group_name = self._get_user_group(group_name) |
|
756 | group_name = self._get_user_group(group_name) | |
751 |
|
757 | |||
752 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
758 | obj = self.sa.query(UserGroupRepoToPerm) \ | |
753 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
759 | .filter(UserGroupRepoToPerm.repository == repo) \ | |
754 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
760 | .filter(UserGroupRepoToPerm.users_group == group_name) \ | |
755 | .scalar() |
|
761 | .scalar() | |
756 | if obj: |
|
762 | if obj: | |
757 | self.sa.delete(obj) |
|
763 | self.sa.delete(obj) | |
758 | log.debug('Revoked perm to %s on %s', repo, group_name) |
|
764 | log.debug('Revoked perm to %s on %s', repo, group_name) | |
759 | action_logger_generic( |
|
765 | action_logger_generic( | |
760 | 'revoked permission from usergroup: {} on repo: {}'.format( |
|
766 | 'revoked permission from usergroup: {} on repo: {}'.format( | |
761 | group_name, repo), namespace='security.repo') |
|
767 | group_name, repo), namespace='security.repo') | |
762 |
|
768 | |||
763 | def delete_stats(self, repo_name): |
|
769 | def delete_stats(self, repo_name): | |
764 | """ |
|
770 | """ | |
765 | removes stats for given repo |
|
771 | removes stats for given repo | |
766 |
|
772 | |||
767 | :param repo_name: |
|
773 | :param repo_name: | |
768 | """ |
|
774 | """ | |
769 | repo = self._get_repo(repo_name) |
|
775 | repo = self._get_repo(repo_name) | |
770 | try: |
|
776 | try: | |
771 | obj = self.sa.query(Statistics) \ |
|
777 | obj = self.sa.query(Statistics) \ | |
772 | .filter(Statistics.repository == repo).scalar() |
|
778 | .filter(Statistics.repository == repo).scalar() | |
773 | if obj: |
|
779 | if obj: | |
774 | self.sa.delete(obj) |
|
780 | self.sa.delete(obj) | |
775 | except Exception: |
|
781 | except Exception: | |
776 | log.error(traceback.format_exc()) |
|
782 | log.error(traceback.format_exc()) | |
777 | raise |
|
783 | raise | |
778 |
|
784 | |||
779 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', |
|
785 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', | |
780 | field_type='str', field_desc=''): |
|
786 | field_type='str', field_desc=''): | |
781 |
|
787 | |||
782 | repo = self._get_repo(repo_name) |
|
788 | repo = self._get_repo(repo_name) | |
783 |
|
789 | |||
784 | new_field = RepositoryField() |
|
790 | new_field = RepositoryField() | |
785 | new_field.repository = repo |
|
791 | new_field.repository = repo | |
786 | new_field.field_key = field_key |
|
792 | new_field.field_key = field_key | |
787 | new_field.field_type = field_type # python type |
|
793 | new_field.field_type = field_type # python type | |
788 | new_field.field_value = field_value |
|
794 | new_field.field_value = field_value | |
789 | new_field.field_desc = field_desc |
|
795 | new_field.field_desc = field_desc | |
790 | new_field.field_label = field_label |
|
796 | new_field.field_label = field_label | |
791 | self.sa.add(new_field) |
|
797 | self.sa.add(new_field) | |
792 | return new_field |
|
798 | return new_field | |
793 |
|
799 | |||
794 | def delete_repo_field(self, repo_name, field_key): |
|
800 | def delete_repo_field(self, repo_name, field_key): | |
795 | repo = self._get_repo(repo_name) |
|
801 | repo = self._get_repo(repo_name) | |
796 | field = RepositoryField.get_by_key_name(field_key, repo) |
|
802 | field = RepositoryField.get_by_key_name(field_key, repo) | |
797 | if field: |
|
803 | if field: | |
798 | self.sa.delete(field) |
|
804 | self.sa.delete(field) | |
799 |
|
805 | |||
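A short sketch of the extra-fields helpers above; the key, label and value are placeholders.

    # illustrative only
    model = RepoModel()
    model.add_repo_field(
        'my-repo', field_key='release', field_label='Release',
        field_value='1.0.0', field_desc='currently deployed release')
    model.delete_repo_field('my-repo', field_key='release')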
800 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, |
|
806 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, | |
801 | clone_uri=None, repo_store_location=None, |
|
807 | clone_uri=None, repo_store_location=None, | |
802 | use_global_config=False): |
|
808 | use_global_config=False): | |
803 | """ |
|
809 | """ | |
804 | makes a repository on the filesystem. It is group aware, meaning it will |
|
810 | makes a repository on the filesystem. It is group aware, meaning it will | |
805 | create the repository within a group and alter the paths according to |
|
811 | create the repository within a group and alter the paths according to | |
806 | the group location |
|
812 | the group location | |
807 |
|
813 | |||
808 | :param repo_name: |
|
814 | :param repo_name: | |
809 | :param alias: |
|
815 | :param alias: | |
810 | :param parent: |
|
816 | :param parent: | |
811 | :param clone_uri: |
|
817 | :param clone_uri: | |
812 | :param repo_store_location: |
|
818 | :param repo_store_location: | |
813 | """ |
|
819 | """ | |
814 | from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group |
|
820 | from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group | |
815 | from rhodecode.model.scm import ScmModel |
|
821 | from rhodecode.model.scm import ScmModel | |
816 |
|
822 | |||
817 | if Repository.NAME_SEP in repo_name: |
|
823 | if Repository.NAME_SEP in repo_name: | |
818 | raise ValueError( |
|
824 | raise ValueError( | |
819 | 'repo_name must not contain groups got `%s`' % repo_name) |
|
825 | 'repo_name must not contain groups got `%s`' % repo_name) | |
820 |
|
826 | |||
821 | if isinstance(repo_group, RepoGroup): |
|
827 | if isinstance(repo_group, RepoGroup): | |
822 | new_parent_path = os.sep.join(repo_group.full_path_splitted) |
|
828 | new_parent_path = os.sep.join(repo_group.full_path_splitted) | |
823 | else: |
|
829 | else: | |
824 | new_parent_path = repo_group or '' |
|
830 | new_parent_path = repo_group or '' | |
825 |
|
831 | |||
826 | if repo_store_location: |
|
832 | if repo_store_location: | |
827 | _paths = [repo_store_location] |
|
833 | _paths = [repo_store_location] | |
828 | else: |
|
834 | else: | |
829 | _paths = [self.repos_path, new_parent_path, repo_name] |
|
835 | _paths = [self.repos_path, new_parent_path, repo_name] | |
830 | # we need to make it str for mercurial |
|
836 | # we need to make it str for mercurial | |
831 | repo_path = os.path.join(*map(lambda x: safe_str(x), _paths)) |
|
837 | repo_path = os.path.join(*map(lambda x: safe_str(x), _paths)) | |
832 |
|
838 | |||
833 | # check if this path is not a repository |
|
839 | # check if this path is not a repository | |
834 | if is_valid_repo(repo_path, self.repos_path): |
|
840 | if is_valid_repo(repo_path, self.repos_path): | |
835 | raise Exception('This path %s is a valid repository' % repo_path) |
|
841 | raise Exception('This path %s is a valid repository' % repo_path) | |
836 |
|
842 | |||
837 | # check if this path is a group |
|
843 | # check if this path is a group | |
838 | if is_valid_repo_group(repo_path, self.repos_path): |
|
844 | if is_valid_repo_group(repo_path, self.repos_path): | |
839 | raise Exception('This path %s is a valid group' % repo_path) |
|
845 | raise Exception('This path %s is a valid group' % repo_path) | |
840 |
|
846 | |||
841 | log.info('creating repo %s in %s from url: `%s`', |
|
847 | log.info('creating repo %s in %s from url: `%s`', | |
842 | repo_name, safe_unicode(repo_path), |
|
848 | repo_name, safe_unicode(repo_path), | |
843 | obfuscate_url_pw(clone_uri)) |
|
849 | obfuscate_url_pw(clone_uri)) | |
844 |
|
850 | |||
845 | backend = get_backend(repo_type) |
|
851 | backend = get_backend(repo_type) | |
846 |
|
852 | |||
847 | config_repo = None if use_global_config else repo_name |
|
853 | config_repo = None if use_global_config else repo_name | |
848 | if config_repo and new_parent_path: |
|
854 | if config_repo and new_parent_path: | |
849 | config_repo = Repository.NAME_SEP.join( |
|
855 | config_repo = Repository.NAME_SEP.join( | |
850 | (new_parent_path, config_repo)) |
|
856 | (new_parent_path, config_repo)) | |
851 | config = make_db_config(clear_session=False, repo=config_repo) |
|
857 | config = make_db_config(clear_session=False, repo=config_repo) | |
852 | config.set('extensions', 'largefiles', '') |
|
858 | config.set('extensions', 'largefiles', '') | |
853 |
|
859 | |||
854 | # patch and reset hooks section of UI config to not run any |
|
860 | # patch and reset hooks section of UI config to not run any | |
855 | # hooks on creating remote repo |
|
861 | # hooks on creating remote repo | |
856 | config.clear_section('hooks') |
|
862 | config.clear_section('hooks') | |
857 |
|
863 | |||
858 | # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice |
|
864 | # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice | |
859 | if repo_type == 'git': |
|
865 | if repo_type == 'git': | |
860 | repo = backend( |
|
866 | repo = backend( | |
861 | repo_path, config=config, create=True, src_url=clone_uri, |
|
867 | repo_path, config=config, create=True, src_url=clone_uri, | |
862 | bare=True) |
|
868 | bare=True) | |
863 | else: |
|
869 | else: | |
864 | repo = backend( |
|
870 | repo = backend( | |
865 | repo_path, config=config, create=True, src_url=clone_uri) |
|
871 | repo_path, config=config, create=True, src_url=clone_uri) | |
866 |
|
872 | |||
867 | repo.install_hooks() |
|
873 | repo.install_hooks() | |
868 |
|
874 | |||
869 | log.debug('Created repo %s with %s backend', |
|
875 | log.debug('Created repo %s with %s backend', | |
870 | safe_unicode(repo_name), safe_unicode(repo_type)) |
|
876 | safe_unicode(repo_name), safe_unicode(repo_type)) | |
871 | return repo |
|
877 | return repo | |
872 |
|
878 | |||
873 | def _rename_filesystem_repo(self, old, new): |
|
879 | def _rename_filesystem_repo(self, old, new): | |
874 | """ |
|
880 | """ | |
875 | renames repository on filesystem |
|
881 | renames repository on filesystem | |
876 |
|
882 | |||
877 | :param old: old name |
|
883 | :param old: old name | |
878 | :param new: new name |
|
884 | :param new: new name | |
879 | """ |
|
885 | """ | |
880 | log.info('renaming repo from %s to %s', old, new) |
|
886 | log.info('renaming repo from %s to %s', old, new) | |
881 |
|
887 | |||
882 | old_path = os.path.join(self.repos_path, old) |
|
888 | old_path = os.path.join(self.repos_path, old) | |
883 | new_path = os.path.join(self.repos_path, new) |
|
889 | new_path = os.path.join(self.repos_path, new) | |
884 | if os.path.isdir(new_path): |
|
890 | if os.path.isdir(new_path): | |
885 | raise Exception( |
|
891 | raise Exception( | |
886 | 'Was trying to rename to already existing dir %s' % new_path |
|
892 | 'Was trying to rename to already existing dir %s' % new_path | |
887 | ) |
|
893 | ) | |
888 | shutil.move(old_path, new_path) |
|
894 | shutil.move(old_path, new_path) | |
889 |
|
895 | |||
890 | def _delete_filesystem_repo(self, repo): |
|
896 | def _delete_filesystem_repo(self, repo): | |
891 | """ |
|
897 | """ | |
892 | removes the repo from the filesystem. The removal is actually made by |
|
898 | removes the repo from the filesystem. The removal is actually made by | |
893 | adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs, |
|
899 | adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs, | |
894 | so this repository is no longer valid for rhodecode; it can be undeleted |
|
900 | so this repository is no longer valid for rhodecode; it can be undeleted | |
895 | later on by reverting the renames on this repository |
|
901 | later on by reverting the renames on this repository | |
896 |
|
902 | |||
897 | :param repo: repo object |
|
903 | :param repo: repo object | |
898 | """ |
|
904 | """ | |
899 | rm_path = os.path.join(self.repos_path, repo.repo_name) |
|
905 | rm_path = os.path.join(self.repos_path, repo.repo_name) | |
900 | repo_group = repo.group |
|
906 | repo_group = repo.group | |
901 | log.info("Removing repository %s", rm_path) |
|
907 | log.info("Removing repository %s", rm_path) | |
902 | # disable hg/git internals so the dir no longer gets detected as a repo |
|
908 | # disable hg/git internals so the dir no longer gets detected as a repo | |
903 | alias = repo.repo_type |
|
909 | alias = repo.repo_type | |
904 |
|
910 | |||
905 | config = make_db_config(clear_session=False) |
|
911 | config = make_db_config(clear_session=False) | |
906 | config.set('extensions', 'largefiles', '') |
|
912 | config.set('extensions', 'largefiles', '') | |
907 | bare = getattr(repo.scm_instance(config=config), 'bare', False) |
|
913 | bare = getattr(repo.scm_instance(config=config), 'bare', False) | |
908 |
|
914 | |||
909 | # skip this for bare git repos |
|
915 | # skip this for bare git repos | |
910 | if not bare: |
|
916 | if not bare: | |
911 | # disable VCS repo |
|
917 | # disable VCS repo | |
912 | vcs_path = os.path.join(rm_path, '.%s' % alias) |
|
918 | vcs_path = os.path.join(rm_path, '.%s' % alias) | |
913 | if os.path.exists(vcs_path): |
|
919 | if os.path.exists(vcs_path): | |
914 | shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias)) |
|
920 | shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias)) | |
915 |
|
921 | |||
916 | _now = datetime.datetime.now() |
|
922 | _now = datetime.datetime.now() | |
917 | _ms = str(_now.microsecond).rjust(6, '0') |
|
923 | _ms = str(_now.microsecond).rjust(6, '0') | |
918 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), |
|
924 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), | |
919 | repo.just_name) |
|
925 | repo.just_name) | |
920 | if repo_group: |
|
926 | if repo_group: | |
921 | # if repository is in group, prefix the removal path with the group |
|
927 | # if repository is in group, prefix the removal path with the group | |
922 | args = repo_group.full_path_splitted + [_d] |
|
928 | args = repo_group.full_path_splitted + [_d] | |
923 | _d = os.path.join(*args) |
|
929 | _d = os.path.join(*args) | |
924 |
|
930 | |||
925 | if os.path.isdir(rm_path): |
|
931 | if os.path.isdir(rm_path): | |
926 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) |
|
932 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) | |
927 |
|
933 | |||
928 | # finally cleanup diff-cache if it exists |
|
934 | # finally cleanup diff-cache if it exists | |
929 | cached_diffs_dir = repo.cached_diffs_dir |
|
935 | cached_diffs_dir = repo.cached_diffs_dir | |
930 | if os.path.isdir(cached_diffs_dir): |
|
936 | if os.path.isdir(cached_diffs_dir): | |
931 | shutil.rmtree(cached_diffs_dir) |
|
937 | shutil.rmtree(cached_diffs_dir) | |
932 |
|
938 | |||
933 |
|
939 | |||
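To make the rm__ archiving scheme above concrete, a tiny standalone sketch that reproduces the generated directory name (the repository name is a placeholder).

    # illustrative only -- mirrors the naming used in _delete_filesystem_repo()
    import datetime

    now = datetime.datetime.now()
    ms = str(now.microsecond).rjust(6, '0')
    archived = 'rm__%s__%s' % (now.strftime('%Y%m%d_%H%M%S_' + ms), 'my-repo')
    # e.g. 'rm__20180704_101530_004213__my-repo'; for repositories inside a
    # group, the group path is prepended to this directory name
    print(archived)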
934 | class ReadmeFinder: |
|
940 | class ReadmeFinder: | |
935 | """ |
|
941 | """ | |
936 | Utility which knows how to find a readme for a specific commit. |
|
942 | Utility which knows how to find a readme for a specific commit. | |
937 |
|
943 | |||
938 | The main idea is that this is a configurable algorithm. When creating an |
|
944 | The main idea is that this is a configurable algorithm. When creating an | |
939 | instance you can define parameters, currently only the `default_renderer`. |
|
945 | instance you can define parameters, currently only the `default_renderer`. | |
940 | Based on this configuration the method :meth:`search` behaves slightly |
|
946 | Based on this configuration the method :meth:`search` behaves slightly | |
941 | differently. |
|
947 | differently. | |
942 | """ |
|
948 | """ | |
943 |
|
949 | |||
944 | readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE) |
|
950 | readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE) | |
945 | path_re = re.compile(r'^docs?', re.IGNORECASE) |
|
951 | path_re = re.compile(r'^docs?', re.IGNORECASE) | |
946 |
|
952 | |||
947 | default_priorities = { |
|
953 | default_priorities = { | |
948 | None: 0, |
|
954 | None: 0, | |
949 | '.text': 2, |
|
955 | '.text': 2, | |
950 | '.txt': 3, |
|
956 | '.txt': 3, | |
951 | '.rst': 1, |
|
957 | '.rst': 1, | |
952 | '.rest': 2, |
|
958 | '.rest': 2, | |
953 | '.md': 1, |
|
959 | '.md': 1, | |
954 | '.mkdn': 2, |
|
960 | '.mkdn': 2, | |
955 | '.mdown': 3, |
|
961 | '.mdown': 3, | |
956 | '.markdown': 4, |
|
962 | '.markdown': 4, | |
957 | } |
|
963 | } | |
958 |
|
964 | |||
959 | path_priority = { |
|
965 | path_priority = { | |
960 | 'doc': 0, |
|
966 | 'doc': 0, | |
961 | 'docs': 1, |
|
967 | 'docs': 1, | |
962 | } |
|
968 | } | |
963 |
|
969 | |||
964 | FALLBACK_PRIORITY = 99 |
|
970 | FALLBACK_PRIORITY = 99 | |
965 |
|
971 | |||
966 | RENDERER_TO_EXTENSION = { |
|
972 | RENDERER_TO_EXTENSION = { | |
967 | 'rst': ['.rst', '.rest'], |
|
973 | 'rst': ['.rst', '.rest'], | |
968 | 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'], |
|
974 | 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'], | |
969 | } |
|
975 | } | |
970 |
|
976 | |||
971 | def __init__(self, default_renderer=None): |
|
977 | def __init__(self, default_renderer=None): | |
972 | self._default_renderer = default_renderer |
|
978 | self._default_renderer = default_renderer | |
973 | self._renderer_extensions = self.RENDERER_TO_EXTENSION.get( |
|
979 | self._renderer_extensions = self.RENDERER_TO_EXTENSION.get( | |
974 | default_renderer, []) |
|
980 | default_renderer, []) | |
975 |
|
981 | |||
976 | def search(self, commit, path='/'): |
|
982 | def search(self, commit, path='/'): | |
977 | """ |
|
983 | """ | |
978 | Find a readme in the given `commit`. |
|
984 | Find a readme in the given `commit`. | |
979 | """ |
|
985 | """ | |
980 | nodes = commit.get_nodes(path) |
|
986 | nodes = commit.get_nodes(path) | |
981 | matches = self._match_readmes(nodes) |
|
987 | matches = self._match_readmes(nodes) | |
982 | matches = self._sort_according_to_priority(matches) |
|
988 | matches = self._sort_according_to_priority(matches) | |
983 | if matches: |
|
989 | if matches: | |
984 | return matches[0].node |
|
990 | return matches[0].node | |
985 |
|
991 | |||
986 | paths = self._match_paths(nodes) |
|
992 | paths = self._match_paths(nodes) | |
987 | paths = self._sort_paths_according_to_priority(paths) |
|
993 | paths = self._sort_paths_according_to_priority(paths) | |
988 | for path in paths: |
|
994 | for path in paths: | |
989 | match = self.search(commit, path=path) |
|
995 | match = self.search(commit, path=path) | |
990 | if match: |
|
996 | if match: | |
991 | return match |
|
997 | return match | |
992 |
|
998 | |||
993 | return None |
|
999 | return None | |
994 |
|
1000 | |||
995 | def _match_readmes(self, nodes): |
|
1001 | def _match_readmes(self, nodes): | |
996 | for node in nodes: |
|
1002 | for node in nodes: | |
997 | if not node.is_file(): |
|
1003 | if not node.is_file(): | |
998 | continue |
|
1004 | continue | |
999 | path = node.path.rsplit('/', 1)[-1] |
|
1005 | path = node.path.rsplit('/', 1)[-1] | |
1000 | match = self.readme_re.match(path) |
|
1006 | match = self.readme_re.match(path) | |
1001 | if match: |
|
1007 | if match: | |
1002 | extension = match.group(1) |
|
1008 | extension = match.group(1) | |
1003 | yield ReadmeMatch(node, match, self._priority(extension)) |
|
1009 | yield ReadmeMatch(node, match, self._priority(extension)) | |
1004 |
|
1010 | |||
1005 | def _match_paths(self, nodes): |
|
1011 | def _match_paths(self, nodes): | |
1006 | for node in nodes: |
|
1012 | for node in nodes: | |
1007 | if not node.is_dir(): |
|
1013 | if not node.is_dir(): | |
1008 | continue |
|
1014 | continue | |
1009 | match = self.path_re.match(node.path) |
|
1015 | match = self.path_re.match(node.path) | |
1010 | if match: |
|
1016 | if match: | |
1011 | yield node.path |
|
1017 | yield node.path | |
1012 |
|
1018 | |||
1013 | def _priority(self, extension): |
|
1019 | def _priority(self, extension): | |
1014 | renderer_priority = ( |
|
1020 | renderer_priority = ( | |
1015 | 0 if extension in self._renderer_extensions else 1) |
|
1021 | 0 if extension in self._renderer_extensions else 1) | |
1016 | extension_priority = self.default_priorities.get( |
|
1022 | extension_priority = self.default_priorities.get( | |
1017 | extension, self.FALLBACK_PRIORITY) |
|
1023 | extension, self.FALLBACK_PRIORITY) | |
1018 | return (renderer_priority, extension_priority) |
|
1024 | return (renderer_priority, extension_priority) | |
1019 |
|
1025 | |||
1020 | def _sort_according_to_priority(self, matches): |
|
1026 | def _sort_according_to_priority(self, matches): | |
1021 |
|
1027 | |||
1022 | def priority_and_path(match): |
|
1028 | def priority_and_path(match): | |
1023 | return (match.priority, match.path) |
|
1029 | return (match.priority, match.path) | |
1024 |
|
1030 | |||
1025 | return sorted(matches, key=priority_and_path) |
|
1031 | return sorted(matches, key=priority_and_path) | |
1026 |
|
1032 | |||
1027 | def _sort_paths_according_to_priority(self, paths): |
|
1033 | def _sort_paths_according_to_priority(self, paths): | |
1028 |
|
1034 | |||
1029 | def priority_and_path(path): |
|
1035 | def priority_and_path(path): | |
1030 | return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path) |
|
1036 | return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path) | |
1031 |
|
1037 | |||
1032 | return sorted(paths, key=priority_and_path) |
|
1038 | return sorted(paths, key=priority_and_path) | |
1033 |
|
1039 | |||
1034 |
|
1040 | |||
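A hedged usage sketch of ReadmeFinder; the commit is assumed to be a vcs commit object (for example from repo.scm_instance().get_commit()).

    # illustrative only -- `commit` is assumed to be a vcs commit object
    finder = ReadmeFinder(default_renderer='markdown')
    readme_node = finder.search(commit)   # scans '/' first, then doc/ and docs/
    if readme_node is not None:
        print(readme_node.path)           # e.g. 'README.md'
    # files matching the configured renderer extensions win first; ties are
    # broken by the extension priorities and then by path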
1035 | class ReadmeMatch: |
|
1041 | class ReadmeMatch: | |
1036 |
|
1042 | |||
1037 | def __init__(self, node, match, priority): |
|
1043 | def __init__(self, node, match, priority): | |
1038 | self.node = node |
|
1044 | self.node = node | |
1039 | self._match = match |
|
1045 | self._match = match | |
1040 | self.priority = priority |
|
1046 | self.priority = priority | |
1041 |
|
1047 | |||
1042 | @property |
|
1048 | @property | |
1043 | def path(self): |
|
1049 | def path(self): | |
1044 | return self.node.path |
|
1050 | return self.node.path | |
1045 |
|
1051 | |||
1046 | def __repr__(self): |
|
1052 | def __repr__(self): | |
1047 | return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority) |
|
1053 | return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority) | |
@@ -1,211 +1,225 b'' | |||||
1 | <%namespace name="base" file="/base/base.mako"/> |
|
1 | <%namespace name="base" file="/base/base.mako"/> | |
2 |
|
2 | |||
3 | <% |
|
3 | <% | |
4 | elems = [ |
|
4 | elems = [ | |
5 | (_('Owner'), lambda:base.gravatar_with_user(c.rhodecode_db_repo.user.email), '', ''), |
|
5 | (_('Owner'), lambda:base.gravatar_with_user(c.rhodecode_db_repo.user.email), '', ''), | |
6 | (_('Created on'), h.format_date(c.rhodecode_db_repo.created_on), '', ''), |
|
6 | (_('Created on'), h.format_date(c.rhodecode_db_repo.created_on), '', ''), | |
7 | (_('Updated on'), h.format_date(c.rhodecode_db_repo.updated_on), '', ''), |
|
7 | (_('Updated on'), h.format_date(c.rhodecode_db_repo.updated_on), '', ''), | |
8 | (_('Cached Commit id'), lambda: h.link_to(c.rhodecode_db_repo.changeset_cache.get('short_id'), h.route_path('repo_commit',repo_name=c.repo_name,commit_id=c.rhodecode_db_repo.changeset_cache.get('raw_id'))), '', ''), |
|
8 | (_('Cached Commit id'), lambda: h.link_to(c.rhodecode_db_repo.changeset_cache.get('short_id'), h.route_path('repo_commit',repo_name=c.repo_name,commit_id=c.rhodecode_db_repo.changeset_cache.get('raw_id'))), '', ''), | |
9 | (_('Attached scoped tokens'), len(c.rhodecode_db_repo.scoped_tokens), '', [x.user for x in c.rhodecode_db_repo.scoped_tokens]), |
|
9 | (_('Attached scoped tokens'), len(c.rhodecode_db_repo.scoped_tokens), '', [x.user for x in c.rhodecode_db_repo.scoped_tokens]), | |
|
10 | (_('Pull requests source'), len(c.rhodecode_db_repo.pull_requests_source), '', ['pr_id:{}, repo:{}'.format(x.pull_request_id,x.source_repo.repo_name) for x in c.rhodecode_db_repo.pull_requests_source]), | |||
|
11 | (_('Pull requests target'), len(c.rhodecode_db_repo.pull_requests_target), '', ['pr_id:{}, repo:{}'.format(x.pull_request_id,x.target_repo.repo_name) for x in c.rhodecode_db_repo.pull_requests_target]), | |||
10 | ] |
|
12 | ] | |
11 | %> |
|
13 | %> | |
12 |
|
14 | |||
13 | <div class="panel panel-default"> |
|
15 | <div class="panel panel-default"> | |
14 | <div class="panel-heading" id="advanced-info" > |
|
16 | <div class="panel-heading" id="advanced-info" > | |
15 | <h3 class="panel-title">${_('Repository: %s') % c.rhodecode_db_repo.repo_name} <a class="permalink" href="#advanced-info"> ΒΆ</a></h3> |
|
17 | <h3 class="panel-title">${_('Repository: %s') % c.rhodecode_db_repo.repo_name} <a class="permalink" href="#advanced-info"> ΒΆ</a></h3> | |
16 | </div> |
|
18 | </div> | |
17 | <div class="panel-body"> |
|
19 | <div class="panel-body"> | |
18 | ${base.dt_info_panel(elems)} |
|
20 | ${base.dt_info_panel(elems)} | |
19 | </div> |
|
21 | </div> | |
20 | </div> |
|
22 | </div> | |
21 |
|
23 | |||
22 |
|
24 | |||
23 | <div class="panel panel-default"> |
|
25 | <div class="panel panel-default"> | |
24 | <div class="panel-heading" id="advanced-fork"> |
|
26 | <div class="panel-heading" id="advanced-fork"> | |
25 | <h3 class="panel-title">${_('Fork Reference')} <a class="permalink" href="#advanced-fork"> ΒΆ</a></h3> |
|
27 | <h3 class="panel-title">${_('Fork Reference')} <a class="permalink" href="#advanced-fork"> ΒΆ</a></h3> | |
26 | </div> |
|
28 | </div> | |
27 | <div class="panel-body"> |
|
29 | <div class="panel-body"> | |
28 | ${h.secure_form(h.route_path('edit_repo_advanced_fork', repo_name=c.rhodecode_db_repo.repo_name), request=request)} |
|
30 | ${h.secure_form(h.route_path('edit_repo_advanced_fork', repo_name=c.rhodecode_db_repo.repo_name), request=request)} | |
29 |
|
31 | |||
30 | % if c.rhodecode_db_repo.fork: |
|
32 | % if c.rhodecode_db_repo.fork: | |
31 | <div class="panel-body-title-text">${h.literal(_('This repository is a fork of %(repo_link)s') % {'repo_link': h.link_to_if(c.has_origin_repo_read_perm,c.rhodecode_db_repo.fork.repo_name, h.route_path('repo_summary', repo_name=c.rhodecode_db_repo.fork.repo_name))})} |
|
33 | <div class="panel-body-title-text">${h.literal(_('This repository is a fork of %(repo_link)s') % {'repo_link': h.link_to_if(c.has_origin_repo_read_perm,c.rhodecode_db_repo.fork.repo_name, h.route_path('repo_summary', repo_name=c.rhodecode_db_repo.fork.repo_name))})} | |
32 | | <button class="btn btn-link btn-danger" type="submit">Remove fork reference</button></div> |
|
34 | | <button class="btn btn-link btn-danger" type="submit">Remove fork reference</button></div> | |
33 | % endif |
|
35 | % endif | |
34 |
|
36 | |||
35 | <div class="field"> |
|
37 | <div class="field"> | |
36 | ${h.hidden('id_fork_of')} |
|
38 | ${h.hidden('id_fork_of')} | |
37 | ${h.submit('set_as_fork_%s' % c.rhodecode_db_repo.repo_name,_('Set'),class_="btn btn-small",)} |
|
39 | ${h.submit('set_as_fork_%s' % c.rhodecode_db_repo.repo_name,_('Set'),class_="btn btn-small",)} | |
38 | </div> |
|
40 | </div> | |
39 | <div class="field"> |
|
41 | <div class="field"> | |
40 | <span class="help-block">${_('Manually set this repository as a fork of another from the list')}</span> |
|
42 | <span class="help-block">${_('Manually set this repository as a fork of another from the list')}</span> | |
41 | </div> |
|
43 | </div> | |
42 | ${h.end_form()} |
|
44 | ${h.end_form()} | |
43 | </div> |
|
45 | </div> | |
44 | </div> |
|
46 | </div> | |
45 |
|
47 | |||
46 |
|
48 | |||
47 | <div class="panel panel-default"> |
|
49 | <div class="panel panel-default"> | |
48 | <div class="panel-heading" id="advanced-journal"> |
|
50 | <div class="panel-heading" id="advanced-journal"> | |
49 | <h3 class="panel-title">${_('Public Journal Visibility')} <a class="permalink" href="#advanced-journal"> ΒΆ</a></h3> |
|
51 | <h3 class="panel-title">${_('Public Journal Visibility')} <a class="permalink" href="#advanced-journal"> ΒΆ</a></h3> | |
50 | </div> |
|
52 | </div> | |
51 | <div class="panel-body"> |
|
53 | <div class="panel-body"> | |
52 | ${h.secure_form(h.route_path('edit_repo_advanced_journal', repo_name=c.rhodecode_db_repo.repo_name), request=request)} |
|
54 | ${h.secure_form(h.route_path('edit_repo_advanced_journal', repo_name=c.rhodecode_db_repo.repo_name), request=request)} | |
53 | <div class="field"> |
|
55 | <div class="field"> | |
54 | %if c.in_public_journal: |
|
56 | %if c.in_public_journal: | |
55 | <button class="btn btn-small" type="submit"> |
|
57 | <button class="btn btn-small" type="submit"> | |
56 | ${_('Remove from Public Journal')} |
|
58 | ${_('Remove from Public Journal')} | |
57 | </button> |
|
59 | </button> | |
58 | %else: |
|
60 | %else: | |
59 | <button class="btn btn-small" type="submit"> |
|
61 | <button class="btn btn-small" type="submit"> | |
60 | ${_('Add to Public Journal')} |
|
62 | ${_('Add to Public Journal')} | |
61 | </button> |
|
63 | </button> | |
62 | %endif |
|
64 | %endif | |
63 | </div> |
|
65 | </div> | |
64 | <div class="field" > |
|
66 | <div class="field" > | |
65 | <span class="help-block">${_('All actions made on this repository will be visible to everyone following the public journal.')}</span> |
|
67 | <span class="help-block">${_('All actions made on this repository will be visible to everyone following the public journal.')}</span> | |
66 | </div> |
|
68 | </div> | |
67 | ${h.end_form()} |
|
69 | ${h.end_form()} | |
68 | </div> |
|
70 | </div> | |
69 | </div> |
|
71 | </div> | |
70 |
|
72 | |||
71 |
|
73 | |||
72 | <div class="panel panel-default"> |
|
74 | <div class="panel panel-default"> | |
73 | <div class="panel-heading" id="advanced-locking"> |
|
75 | <div class="panel-heading" id="advanced-locking"> | |
74 | <h3 class="panel-title">${_('Locking state')} <a class="permalink" href="#advanced-locking"> ΒΆ</a></h3> |
|
76 | <h3 class="panel-title">${_('Locking state')} <a class="permalink" href="#advanced-locking"> ΒΆ</a></h3> | |
75 | </div> |
|
77 | </div> | |
76 | <div class="panel-body"> |
|
78 | <div class="panel-body"> | |
77 | ${h.secure_form(h.route_path('edit_repo_advanced_locking', repo_name=c.rhodecode_db_repo.repo_name), request=request)} |
|
79 | ${h.secure_form(h.route_path('edit_repo_advanced_locking', repo_name=c.rhodecode_db_repo.repo_name), request=request)} | |
78 |
|
80 | |||
79 | %if c.rhodecode_db_repo.locked[0]: |
|
81 | %if c.rhodecode_db_repo.locked[0]: | |
80 | <div class="panel-body-title-text">${'Locked by %s on %s. Lock reason: %s' % (h.person_by_id(c.rhodecode_db_repo.locked[0]), |
|
82 | <div class="panel-body-title-text">${'Locked by %s on %s. Lock reason: %s' % (h.person_by_id(c.rhodecode_db_repo.locked[0]), | |
81 | h.format_date(h.time_to_datetime(c.rhodecode_db_repo.locked[1])), c.rhodecode_db_repo.locked[2])}</div> |
 |
83 | h.format_date(h.time_to_datetime(c.rhodecode_db_repo.locked[1])), c.rhodecode_db_repo.locked[2])}</div> | |
82 | %else: |
|
84 | %else: | |
83 | <div class="panel-body-title-text">${_('This Repository is not currently locked.')}</div> |
|
85 | <div class="panel-body-title-text">${_('This Repository is not currently locked.')}</div> | |
84 | %endif |
|
86 | %endif | |
85 |
|
87 | |||
86 | <div class="field" > |
|
88 | <div class="field" > | |
87 | %if c.rhodecode_db_repo.locked[0]: |
|
89 | %if c.rhodecode_db_repo.locked[0]: | |
88 | ${h.hidden('set_unlock', '1')} |
|
90 | ${h.hidden('set_unlock', '1')} | |
89 | <button class="btn btn-small" type="submit" |
|
91 | <button class="btn btn-small" type="submit" | |
90 | onclick="return confirm('${_('Confirm to unlock repository.')}');"> |
|
92 | onclick="return confirm('${_('Confirm to unlock repository.')}');"> | |
91 | <i class="icon-unlock"></i> |
|
93 | <i class="icon-unlock"></i> | |
92 | ${_('Unlock repository')} |
|
94 | ${_('Unlock repository')} | |
93 | </button> |
|
95 | </button> | |
94 | %else: |
|
96 | %else: | |
95 | ${h.hidden('set_lock', '1')} |
|
97 | ${h.hidden('set_lock', '1')} | |
96 | <button class="btn btn-small" type="submit" |
|
98 | <button class="btn btn-small" type="submit" | |
97 | onclick="return confirm('${_('Confirm to lock repository.')}');"> |
|
99 | onclick="return confirm('${_('Confirm to lock repository.')}');"> | |
98 | <i class="icon-lock"></i> |
|
100 | <i class="icon-lock"></i> | |
99 | ${_('Lock Repository')} |
|
101 | ${_('Lock Repository')} | |
100 | </button> |
|
102 | </button> | |
101 | %endif |
|
103 | %endif | |
102 | </div> |
|
104 | </div> | |
103 | <div class="field" > |
|
105 | <div class="field" > | |
104 | <span class="help-block"> |
|
106 | <span class="help-block"> | |
105 | ${_('Force repository locking. This only works when anonymous access is disabled. Pulling from the repository locks the repository to that user until the same user pushes to that repository again.')} |
|
107 | ${_('Force repository locking. This only works when anonymous access is disabled. Pulling from the repository locks the repository to that user until the same user pushes to that repository again.')} | |
106 | </span> |
|
108 | </span> | |
107 | </div> |
|
109 | </div> | |
108 | ${h.end_form()} |
|
110 | ${h.end_form()} | |
109 | </div> |
|
111 | </div> | |
110 | </div> |
|
112 | </div> | |
111 |
|
113 | |||
112 | <div class="panel panel-danger"> |
|
114 | <div class="panel panel-danger"> | |
113 | <div class="panel-heading" id="advanced-delete"> |
|
115 | <div class="panel-heading" id="advanced-delete"> | |
114 | <h3 class="panel-title">${_('Delete repository')} <a class="permalink" href="#advanced-delete"> ΒΆ</a></h3> |
|
116 | <h3 class="panel-title">${_('Delete repository')} <a class="permalink" href="#advanced-delete"> ΒΆ</a></h3> | |
115 | </div> |
|
117 | </div> | |
116 | <div class="panel-body"> |
|
118 | <div class="panel-body"> | |
117 | ${h.secure_form(h.route_path('edit_repo_advanced_delete', repo_name=c.repo_name), request=request)} |
|
119 | ${h.secure_form(h.route_path('edit_repo_advanced_delete', repo_name=c.repo_name), request=request)} | |
118 | <table class="display"> |
|
120 | <table class="display"> | |
119 | <tr> |
|
121 | <tr> | |
120 | <td> |
|
122 | <td> | |
121 | ${_ungettext('This repository has %s fork.', 'This repository has %s forks.', c.rhodecode_db_repo.forks.count()) % c.rhodecode_db_repo.forks.count()} |
|
123 | ${_ungettext('This repository has %s fork.', 'This repository has %s forks.', c.rhodecode_db_repo.forks.count()) % c.rhodecode_db_repo.forks.count()} | |
122 | </td> |
|
124 | </td> | |
123 | <td> |
|
125 | <td> | |
124 | %if c.rhodecode_db_repo.forks.count(): |
|
126 | %if c.rhodecode_db_repo.forks.count(): | |
125 | <input type="radio" name="forks" value="detach_forks" checked="checked"/> <label for="forks">${_('Detach forks')}</label> |
|
127 | <input type="radio" name="forks" value="detach_forks" checked="checked"/> <label for="forks">${_('Detach forks')}</label> | |
126 | %endif |
|
128 | %endif | |
127 | </td> |
|
129 | </td> | |
128 | <td> |
|
130 | <td> | |
129 | %if c.rhodecode_db_repo.forks.count(): |
|
131 | %if c.rhodecode_db_repo.forks.count(): | |
130 | <input type="radio" name="forks" value="delete_forks"/> <label for="forks">${_('Delete forks')}</label> |
|
132 | <input type="radio" name="forks" value="delete_forks"/> <label for="forks">${_('Delete forks')}</label> | |
131 | %endif |
|
133 | %endif | |
132 | </td> |
|
134 | </td> | |
133 | </tr> |
|
135 | </tr> | |
|
136 | <% attached_prs = len(c.rhodecode_db_repo.pull_requests_source + c.rhodecode_db_repo.pull_requests_target) %> | |||
|
137 | % if c.rhodecode_db_repo.pull_requests_source or c.rhodecode_db_repo.pull_requests_target: | |||
|
138 | <tr> | |||
|
139 | <td> | |||
|
140 | ${_ungettext('This repository has %s attached pull request.', 'This repository has %s attached pull requests.', attached_prs) % attached_prs} | |||
|
141 | <br/> | |||
|
142 | ${_('Consider archiving this repository instead.')} | |||
|
143 | </td> | |||
|
144 | <td></td> | |||
|
145 | <td></td> | |||
|
146 | </tr> | |||
|
147 | % endif | |||
134 | </table> |
|
148 | </table> | |
135 | <div style="margin: 0 0 20px 0" class="fake-space"></div> |
|
149 | <div style="margin: 0 0 20px 0" class="fake-space"></div> | |
136 |
|
150 | |||
137 | <div class="field"> |
|
151 | <div class="field"> | |
138 | <button class="btn btn-small btn-danger" type="submit" |
|
152 | <button class="btn btn-small btn-danger" type="submit" | |
139 | onclick="return confirm('${_('Confirm to delete this repository: %s') % c.repo_name}');"> |
|
153 | onclick="return confirm('${_('Confirm to delete this repository: %s') % c.repo_name}');"> | |
140 | <i class="icon-remove-sign"></i> |
|
154 | <i class="icon-remove-sign"></i> | |
141 | ${_('Delete This Repository')} |
|
155 | ${_('Delete This Repository')} | |
142 | </button> |
|
156 | </button> | |
143 | </div> |
|
157 | </div> | |
144 | <div class="field"> |
|
158 | <div class="field"> | |
145 | <span class="help-block"> |
|
159 | <span class="help-block"> | |
146 | ${_('This repository will be renamed in a special way in order to make it inaccessible to RhodeCode Enterprise and its VCS systems. If you need to fully delete it from the file system, please do it manually, or with the rhodecode-cleanup-repos command available in rhodecode-tools.')} |
 |
160 | ${_('This repository will be renamed in a special way in order to make it inaccessible to RhodeCode Enterprise and its VCS systems. If you need to fully delete it from the file system, please do it manually, or with the rhodecode-cleanup-repos command available in rhodecode-tools.')} | |
147 | </span> |
|
161 | </span> | |
148 | </div> |
|
162 | </div> | |
149 |
|
163 | |||
150 | ${h.end_form()} |
|
164 | ${h.end_form()} | |
151 | </div> |
|
165 | </div> | |
152 | </div> |
|
166 | </div> | |
153 |
|
167 | |||
154 |
|
168 | |||
155 | <script> |
|
169 | <script> | |
156 |
|
170 | |||
157 | var currentRepoId = ${c.rhodecode_db_repo.repo_id}; |
|
171 | var currentRepoId = ${c.rhodecode_db_repo.repo_id}; | |
158 |
|
172 | |||
159 | var repoTypeFilter = function(data) { |
|
173 | var repoTypeFilter = function(data) { | |
160 | var results = []; |
|
174 | var results = []; | |
161 |
|
175 | |||
162 | if (!data.results[0]) { |
|
176 | if (!data.results[0]) { | |
163 | return data |
|
177 | return data | |
164 | } |
|
178 | } | |
165 |
|
179 | |||
166 | $.each(data.results[0].children, function() { |
|
180 | $.each(data.results[0].children, function() { | |
167 | // filter out the SAME repo, it cannot be used as fork of itself |
|
181 | // filter out the SAME repo, it cannot be used as fork of itself | |
168 | if (this.repo_id != currentRepoId) { |
|
182 | if (this.repo_id != currentRepoId) { | |
169 | this.id = this.repo_id; |
|
183 | this.id = this.repo_id; | |
170 | results.push(this) |
|
184 | results.push(this) | |
171 | } |
|
185 | } | |
172 | }); |
|
186 | }); | |
173 | data.results[0].children = results; |
|
187 | data.results[0].children = results; | |
174 | return data; |
|
188 | return data; | |
175 | }; |
|
189 | }; | |
176 |
|
190 | |||
177 | $("#id_fork_of").select2({ |
|
191 | $("#id_fork_of").select2({ | |
178 | cachedDataSource: {}, |
|
192 | cachedDataSource: {}, | |
179 | minimumInputLength: 2, |
|
193 | minimumInputLength: 2, | |
180 | placeholder: "${_('Change repository') if c.rhodecode_db_repo.fork else _('Pick repository')}", |
|
194 | placeholder: "${_('Change repository') if c.rhodecode_db_repo.fork else _('Pick repository')}", | |
181 | dropdownAutoWidth: true, |
|
195 | dropdownAutoWidth: true, | |
182 | containerCssClass: "drop-menu", |
|
196 | containerCssClass: "drop-menu", | |
183 | dropdownCssClass: "drop-menu-dropdown", |
|
197 | dropdownCssClass: "drop-menu-dropdown", | |
184 | formatResult: formatRepoResult, |
|
198 | formatResult: formatRepoResult, | |
185 | query: $.debounce(250, function(query){ |
|
199 | query: $.debounce(250, function(query){ | |
186 | self = this; |
|
200 | self = this; | |
187 | var cacheKey = query.term; |
|
201 | var cacheKey = query.term; | |
188 | var cachedData = self.cachedDataSource[cacheKey]; |
|
202 | var cachedData = self.cachedDataSource[cacheKey]; | |
189 |
|
203 | |||
190 | if (cachedData) { |
|
204 | if (cachedData) { | |
191 | query.callback({results: cachedData.results}); |
|
205 | query.callback({results: cachedData.results}); | |
192 | } else { |
|
206 | } else { | |
193 | $.ajax({ |
|
207 | $.ajax({ | |
194 | url: pyroutes.url('repo_list_data'), |
|
208 | url: pyroutes.url('repo_list_data'), | |
195 | data: {'query': query.term, repo_type: '${c.rhodecode_db_repo.repo_type}'}, |
|
209 | data: {'query': query.term, repo_type: '${c.rhodecode_db_repo.repo_type}'}, | |
196 | dataType: 'json', |
|
210 | dataType: 'json', | |
197 | type: 'GET', |
|
211 | type: 'GET', | |
198 | success: function(data) { |
|
212 | success: function(data) { | |
199 | data = repoTypeFilter(data); |
|
213 | data = repoTypeFilter(data); | |
200 | self.cachedDataSource[cacheKey] = data; |
|
214 | self.cachedDataSource[cacheKey] = data; | |
201 | query.callback({results: data.results}); |
|
215 | query.callback({results: data.results}); | |
202 | }, |
|
216 | }, | |
203 | error: function(data, textStatus, errorThrown) { |
|
217 | error: function(data, textStatus, errorThrown) { | |
204 | alert("Error while fetching entries.\nError code {0} ({1}).".format(data.status, data.statusText)); |
|
218 | alert("Error while fetching entries.\nError code {0} ({1}).".format(data.status, data.statusText)); | |
205 | } |
|
219 | } | |
206 | }) |
|
220 | }) | |
207 | } |
|
221 | } | |
208 | }) |
|
222 | }) | |
209 | }); |
|
223 | }); | |
210 | </script> |
|
224 | </script> | |
211 |
|
225 |
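Note: the new template rows above surface pull requests attached to the repository and warn before deletion. As a minimal sketch of the quantity the `attached_prs` Mako expression computes (the helper name below is hypothetical and not part of the changeset)::

    def attached_pull_request_count(repo):
        # Mirrors the Mako expression: pull requests where this repository
        # is the source plus those where it is the target.
        return len(repo.pull_requests_source) + len(repo.pull_requests_target)

    # e.g. inside a view or test, assuming a Repository instance `repo`:
    #   if attached_pull_request_count(repo):
    #       ...suggest archiving instead of deleting...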
@@ -1,349 +1,349 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2018 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Helpers for fixture generation |
|
22 | Helpers for fixture generation | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import os |
|
25 | import os | |
26 | import time |
|
26 | import time | |
27 | import tempfile |
|
27 | import tempfile | |
28 | import shutil |
|
28 | import shutil | |
29 |
|
29 | |||
30 | import configobj |
|
30 | import configobj | |
31 |
|
31 | |||
32 | from rhodecode.tests import * |
|
32 | from rhodecode.tests import * | |
33 | from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist, UserEmailMap |
|
33 | from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist, UserEmailMap | |
34 | from rhodecode.model.meta import Session |
|
34 | from rhodecode.model.meta import Session | |
35 | from rhodecode.model.repo import RepoModel |
|
35 | from rhodecode.model.repo import RepoModel | |
36 | from rhodecode.model.user import UserModel |
|
36 | from rhodecode.model.user import UserModel | |
37 | from rhodecode.model.repo_group import RepoGroupModel |
|
37 | from rhodecode.model.repo_group import RepoGroupModel | |
38 | from rhodecode.model.user_group import UserGroupModel |
|
38 | from rhodecode.model.user_group import UserGroupModel | |
39 | from rhodecode.model.gist import GistModel |
|
39 | from rhodecode.model.gist import GistModel | |
40 | from rhodecode.model.auth_token import AuthTokenModel |
|
40 | from rhodecode.model.auth_token import AuthTokenModel | |
41 |
|
41 | |||
42 | dn = os.path.dirname |
|
42 | dn = os.path.dirname | |
43 | FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures') |
|
43 | FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures') | |
44 |
|
44 | |||
45 |
|
45 | |||
46 | def error_function(*args, **kwargs): |
|
46 | def error_function(*args, **kwargs): | |
47 | raise Exception('Total Crash !') |
|
47 | raise Exception('Total Crash !') | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | class TestINI(object): |
|
50 | class TestINI(object): | |
51 | """ |
|
51 | """ | |
52 | Allows to create a new test.ini file as a copy of existing one with edited |
|
52 | Allows to create a new test.ini file as a copy of existing one with edited | |
53 | data. Example usage:: |
|
53 | data. Example usage:: | |
54 |
|
54 | |||
55 | with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path: |
 |
55 | with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path: | |
56 | print('paster server %s' % new_test_ini_path) |
 |
56 | print('paster server %s' % new_test_ini_path) | |
57 | """ |
|
57 | """ | |
58 |
|
58 | |||
59 | def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT', |
|
59 | def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT', | |
60 | destroy=True, dir=None): |
|
60 | destroy=True, dir=None): | |
61 | self.ini_file_path = ini_file_path |
|
61 | self.ini_file_path = ini_file_path | |
62 | self.ini_params = ini_params |
|
62 | self.ini_params = ini_params | |
63 | self.new_path = None |
|
63 | self.new_path = None | |
64 | self.new_path_prefix = new_file_prefix |
|
64 | self.new_path_prefix = new_file_prefix | |
65 | self._destroy = destroy |
|
65 | self._destroy = destroy | |
66 | self._dir = dir |
|
66 | self._dir = dir | |
67 |
|
67 | |||
68 | def __enter__(self): |
|
68 | def __enter__(self): | |
69 | return self.create() |
|
69 | return self.create() | |
70 |
|
70 | |||
71 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
71 | def __exit__(self, exc_type, exc_val, exc_tb): | |
72 | self.destroy() |
|
72 | self.destroy() | |
73 |
|
73 | |||
74 | def create(self): |
|
74 | def create(self): | |
75 | config = configobj.ConfigObj( |
|
75 | config = configobj.ConfigObj( | |
76 | self.ini_file_path, file_error=True, write_empty_values=True) |
|
76 | self.ini_file_path, file_error=True, write_empty_values=True) | |
77 |
|
77 | |||
78 | for data in self.ini_params: |
|
78 | for data in self.ini_params: | |
79 | section, ini_params = data.items()[0] |
|
79 | section, ini_params = data.items()[0] | |
80 | for key, val in ini_params.items(): |
|
80 | for key, val in ini_params.items(): | |
81 | config[section][key] = val |
|
81 | config[section][key] = val | |
82 | with tempfile.NamedTemporaryFile( |
|
82 | with tempfile.NamedTemporaryFile( | |
83 | prefix=self.new_path_prefix, suffix='.ini', dir=self._dir, |
|
83 | prefix=self.new_path_prefix, suffix='.ini', dir=self._dir, | |
84 | delete=False) as new_ini_file: |
|
84 | delete=False) as new_ini_file: | |
85 | config.write(new_ini_file) |
|
85 | config.write(new_ini_file) | |
86 | self.new_path = new_ini_file.name |
|
86 | self.new_path = new_ini_file.name | |
87 |
|
87 | |||
88 | return self.new_path |
|
88 | return self.new_path | |
89 |
|
89 | |||
90 | def destroy(self): |
|
90 | def destroy(self): | |
91 | if self._destroy: |
|
91 | if self._destroy: | |
92 | os.remove(self.new_path) |
|
92 | os.remove(self.new_path) | |
93 |
|
93 | |||
94 |
|
94 | |||
95 | class Fixture(object): |
|
95 | class Fixture(object): | |
96 |
|
96 | |||
97 | def anon_access(self, status): |
|
97 | def anon_access(self, status): | |
98 | """ |
|
98 | """ | |
99 | Context process for disabling anonymous access. use like: |
|
99 | Context process for disabling anonymous access. use like: | |
100 | fixture = Fixture() |
|
100 | fixture = Fixture() | |
101 | with fixture.anon_access(False): |
|
101 | with fixture.anon_access(False): | |
102 | #tests |
|
102 | #tests | |
103 |
|
103 | |||
104 | after this block anon access will be set to `not status` |
|
104 | after this block anon access will be set to `not status` | |
105 | """ |
|
105 | """ | |
106 |
|
106 | |||
107 | class context(object): |
|
107 | class context(object): | |
108 | def __enter__(self): |
|
108 | def __enter__(self): | |
109 | anon = User.get_default_user() |
|
109 | anon = User.get_default_user() | |
110 | anon.active = status |
|
110 | anon.active = status | |
111 | Session().add(anon) |
|
111 | Session().add(anon) | |
112 | Session().commit() |
|
112 | Session().commit() | |
113 | time.sleep(1.5) # must sleep for cache (1s to expire) |
|
113 | time.sleep(1.5) # must sleep for cache (1s to expire) | |
114 |
|
114 | |||
115 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
115 | def __exit__(self, exc_type, exc_val, exc_tb): | |
116 | anon = User.get_default_user() |
|
116 | anon = User.get_default_user() | |
117 | anon.active = not status |
|
117 | anon.active = not status | |
118 | Session().add(anon) |
|
118 | Session().add(anon) | |
119 | Session().commit() |
|
119 | Session().commit() | |
120 |
|
120 | |||
121 | return context() |
|
121 | return context() | |
122 |
|
122 | |||
123 | def _get_repo_create_params(self, **custom): |
|
123 | def _get_repo_create_params(self, **custom): | |
124 | defs = { |
|
124 | defs = { | |
125 | 'repo_name': None, |
|
125 | 'repo_name': None, | |
126 | 'repo_type': 'hg', |
|
126 | 'repo_type': 'hg', | |
127 | 'clone_uri': '', |
|
127 | 'clone_uri': '', | |
128 | 'push_uri': '', |
|
128 | 'push_uri': '', | |
129 | 'repo_group': '-1', |
|
129 | 'repo_group': '-1', | |
130 | 'repo_description': 'DESC', |
|
130 | 'repo_description': 'DESC', | |
131 | 'repo_private': False, |
|
131 | 'repo_private': False, | |
132 | 'repo_landing_rev': 'rev:tip', |
|
132 | 'repo_landing_rev': 'rev:tip', | |
133 | 'repo_copy_permissions': False, |
|
133 | 'repo_copy_permissions': False, | |
134 | 'repo_state': Repository.STATE_CREATED, |
|
134 | 'repo_state': Repository.STATE_CREATED, | |
135 | } |
|
135 | } | |
136 | defs.update(custom) |
|
136 | defs.update(custom) | |
137 | if 'repo_name_full' not in custom: |
|
137 | if 'repo_name_full' not in custom: | |
138 | defs.update({'repo_name_full': defs['repo_name']}) |
|
138 | defs.update({'repo_name_full': defs['repo_name']}) | |
139 |
|
139 | |||
140 | # fix the repo name if passed as repo_name_full |
|
140 | # fix the repo name if passed as repo_name_full | |
141 | if defs['repo_name']: |
|
141 | if defs['repo_name']: | |
142 | defs['repo_name'] = defs['repo_name'].split('/')[-1] |
|
142 | defs['repo_name'] = defs['repo_name'].split('/')[-1] | |
143 |
|
143 | |||
144 | return defs |
|
144 | return defs | |
145 |
|
145 | |||
146 | def _get_group_create_params(self, **custom): |
|
146 | def _get_group_create_params(self, **custom): | |
147 | defs = { |
|
147 | defs = { | |
148 | 'group_name': None, |
|
148 | 'group_name': None, | |
149 | 'group_description': 'DESC', |
|
149 | 'group_description': 'DESC', | |
150 | 'perm_updates': [], |
|
150 | 'perm_updates': [], | |
151 | 'perm_additions': [], |
|
151 | 'perm_additions': [], | |
152 | 'perm_deletions': [], |
|
152 | 'perm_deletions': [], | |
153 | 'group_parent_id': -1, |
|
153 | 'group_parent_id': -1, | |
154 | 'enable_locking': False, |
|
154 | 'enable_locking': False, | |
155 | 'recursive': False, |
|
155 | 'recursive': False, | |
156 | } |
|
156 | } | |
157 | defs.update(custom) |
|
157 | defs.update(custom) | |
158 |
|
158 | |||
159 | return defs |
|
159 | return defs | |
160 |
|
160 | |||
161 | def _get_user_create_params(self, name, **custom): |
|
161 | def _get_user_create_params(self, name, **custom): | |
162 | defs = { |
|
162 | defs = { | |
163 | 'username': name, |
|
163 | 'username': name, | |
164 | 'password': 'qweqwe', |
|
164 | 'password': 'qweqwe', | |
165 | 'email': '%s+test@rhodecode.org' % name, |
|
165 | 'email': '%s+test@rhodecode.org' % name, | |
166 | 'firstname': 'TestUser', |
|
166 | 'firstname': 'TestUser', | |
167 | 'lastname': 'Test', |
|
167 | 'lastname': 'Test', | |
168 | 'active': True, |
|
168 | 'active': True, | |
169 | 'admin': False, |
|
169 | 'admin': False, | |
170 | 'extern_type': 'rhodecode', |
|
170 | 'extern_type': 'rhodecode', | |
171 | 'extern_name': None, |
|
171 | 'extern_name': None, | |
172 | } |
|
172 | } | |
173 | defs.update(custom) |
|
173 | defs.update(custom) | |
174 |
|
174 | |||
175 | return defs |
|
175 | return defs | |
176 |
|
176 | |||
177 | def _get_user_group_create_params(self, name, **custom): |
|
177 | def _get_user_group_create_params(self, name, **custom): | |
178 | defs = { |
|
178 | defs = { | |
179 | 'users_group_name': name, |
|
179 | 'users_group_name': name, | |
180 | 'user_group_description': 'DESC', |
|
180 | 'user_group_description': 'DESC', | |
181 | 'users_group_active': True, |
|
181 | 'users_group_active': True, | |
182 | 'user_group_data': {}, |
|
182 | 'user_group_data': {}, | |
183 | } |
|
183 | } | |
184 | defs.update(custom) |
|
184 | defs.update(custom) | |
185 |
|
185 | |||
186 | return defs |
|
186 | return defs | |
187 |
|
187 | |||
188 | def create_repo(self, name, **kwargs): |
|
188 | def create_repo(self, name, **kwargs): | |
189 | repo_group = kwargs.get('repo_group') |
|
189 | repo_group = kwargs.get('repo_group') | |
190 | if isinstance(repo_group, RepoGroup): |
|
190 | if isinstance(repo_group, RepoGroup): | |
191 | kwargs['repo_group'] = repo_group.group_id |
|
191 | kwargs['repo_group'] = repo_group.group_id | |
192 | name = name.split(Repository.NAME_SEP)[-1] |
|
192 | name = name.split(Repository.NAME_SEP)[-1] | |
193 | name = Repository.NAME_SEP.join((repo_group.group_name, name)) |
|
193 | name = Repository.NAME_SEP.join((repo_group.group_name, name)) | |
194 |
|
194 | |||
195 | if 'skip_if_exists' in kwargs: |
|
195 | if 'skip_if_exists' in kwargs: | |
196 | del kwargs['skip_if_exists'] |
|
196 | del kwargs['skip_if_exists'] | |
197 | r = Repository.get_by_repo_name(name) |
|
197 | r = Repository.get_by_repo_name(name) | |
198 | if r: |
|
198 | if r: | |
199 | return r |
|
199 | return r | |
200 |
|
200 | |||
201 | form_data = self._get_repo_create_params(repo_name=name, **kwargs) |
|
201 | form_data = self._get_repo_create_params(repo_name=name, **kwargs) | |
202 | cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
202 | cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
203 | RepoModel().create(form_data, cur_user) |
|
203 | RepoModel().create(form_data, cur_user) | |
204 | Session().commit() |
|
204 | Session().commit() | |
205 | repo = Repository.get_by_repo_name(name) |
|
205 | repo = Repository.get_by_repo_name(name) | |
206 | assert repo |
|
206 | assert repo | |
207 | return repo |
|
207 | return repo | |
208 |
|
208 | |||
209 | def create_fork(self, repo_to_fork, fork_name, **kwargs): |
|
209 | def create_fork(self, repo_to_fork, fork_name, **kwargs): | |
210 | repo_to_fork = Repository.get_by_repo_name(repo_to_fork) |
|
210 | repo_to_fork = Repository.get_by_repo_name(repo_to_fork) | |
211 |
|
211 | |||
212 | form_data = self._get_repo_create_params(repo_name=fork_name, |
|
212 | form_data = self._get_repo_create_params(repo_name=fork_name, | |
213 | fork_parent_id=repo_to_fork.repo_id, |
|
213 | fork_parent_id=repo_to_fork.repo_id, | |
214 | repo_type=repo_to_fork.repo_type, |
|
214 | repo_type=repo_to_fork.repo_type, | |
215 | **kwargs) |
|
215 | **kwargs) | |
216 | #TODO: fix it !! |
|
216 | #TODO: fix it !! | |
217 | form_data['description'] = form_data['repo_description'] |
|
217 | form_data['description'] = form_data['repo_description'] | |
218 | form_data['private'] = form_data['repo_private'] |
|
218 | form_data['private'] = form_data['repo_private'] | |
219 | form_data['landing_rev'] = form_data['repo_landing_rev'] |
|
219 | form_data['landing_rev'] = form_data['repo_landing_rev'] | |
220 |
|
220 | |||
221 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
221 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
222 | RepoModel().create_fork(form_data, cur_user=owner) |
|
222 | RepoModel().create_fork(form_data, cur_user=owner) | |
223 | Session().commit() |
|
223 | Session().commit() | |
224 | r = Repository.get_by_repo_name(fork_name) |
|
224 | r = Repository.get_by_repo_name(fork_name) | |
225 | assert r |
|
225 | assert r | |
226 | return r |
|
226 | return r | |
227 |
|
227 | |||
228 | def destroy_repo(self, repo_name, **kwargs): |
|
228 | def destroy_repo(self, repo_name, **kwargs): | |
229 | RepoModel().delete(repo_name, **kwargs) |
|
229 | RepoModel().delete(repo_name, pull_requests='delete', **kwargs) | |
230 | Session().commit() |
|
230 | Session().commit() | |
231 |
|
231 | |||
232 | def destroy_repo_on_filesystem(self, repo_name): |
|
232 | def destroy_repo_on_filesystem(self, repo_name): | |
233 | rm_path = os.path.join(RepoModel().repos_path, repo_name) |
|
233 | rm_path = os.path.join(RepoModel().repos_path, repo_name) | |
234 | if os.path.isdir(rm_path): |
|
234 | if os.path.isdir(rm_path): | |
235 | shutil.rmtree(rm_path) |
|
235 | shutil.rmtree(rm_path) | |
236 |
|
236 | |||
237 | def create_repo_group(self, name, **kwargs): |
|
237 | def create_repo_group(self, name, **kwargs): | |
238 | if 'skip_if_exists' in kwargs: |
|
238 | if 'skip_if_exists' in kwargs: | |
239 | del kwargs['skip_if_exists'] |
|
239 | del kwargs['skip_if_exists'] | |
240 | gr = RepoGroup.get_by_group_name(group_name=name) |
|
240 | gr = RepoGroup.get_by_group_name(group_name=name) | |
241 | if gr: |
|
241 | if gr: | |
242 | return gr |
|
242 | return gr | |
243 | form_data = self._get_group_create_params(group_name=name, **kwargs) |
|
243 | form_data = self._get_group_create_params(group_name=name, **kwargs) | |
244 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
244 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
245 | gr = RepoGroupModel().create( |
|
245 | gr = RepoGroupModel().create( | |
246 | group_name=form_data['group_name'], |
|
246 | group_name=form_data['group_name'], | |
247 | group_description=form_data['group_name'], |
|
247 | group_description=form_data['group_name'], | |
248 | owner=owner) |
|
248 | owner=owner) | |
249 | Session().commit() |
|
249 | Session().commit() | |
250 | gr = RepoGroup.get_by_group_name(gr.group_name) |
|
250 | gr = RepoGroup.get_by_group_name(gr.group_name) | |
251 | return gr |
|
251 | return gr | |
252 |
|
252 | |||
253 | def destroy_repo_group(self, repogroupid): |
|
253 | def destroy_repo_group(self, repogroupid): | |
254 | RepoGroupModel().delete(repogroupid) |
|
254 | RepoGroupModel().delete(repogroupid) | |
255 | Session().commit() |
|
255 | Session().commit() | |
256 |
|
256 | |||
257 | def create_user(self, name, **kwargs): |
|
257 | def create_user(self, name, **kwargs): | |
258 | if 'skip_if_exists' in kwargs: |
|
258 | if 'skip_if_exists' in kwargs: | |
259 | del kwargs['skip_if_exists'] |
|
259 | del kwargs['skip_if_exists'] | |
260 | user = User.get_by_username(name) |
|
260 | user = User.get_by_username(name) | |
261 | if user: |
|
261 | if user: | |
262 | return user |
|
262 | return user | |
263 | form_data = self._get_user_create_params(name, **kwargs) |
|
263 | form_data = self._get_user_create_params(name, **kwargs) | |
264 | user = UserModel().create(form_data) |
|
264 | user = UserModel().create(form_data) | |
265 |
|
265 | |||
266 | # create token for user |
|
266 | # create token for user | |
267 | AuthTokenModel().create( |
|
267 | AuthTokenModel().create( | |
268 | user=user, description=u'TEST_USER_TOKEN') |
|
268 | user=user, description=u'TEST_USER_TOKEN') | |
269 |
|
269 | |||
270 | Session().commit() |
|
270 | Session().commit() | |
271 | user = User.get_by_username(user.username) |
|
271 | user = User.get_by_username(user.username) | |
272 | return user |
|
272 | return user | |
273 |
|
273 | |||
274 | def destroy_user(self, userid): |
|
274 | def destroy_user(self, userid): | |
275 | UserModel().delete(userid) |
|
275 | UserModel().delete(userid) | |
276 | Session().commit() |
|
276 | Session().commit() | |
277 |
|
277 | |||
278 | def create_additional_user_email(self, user, email): |
|
278 | def create_additional_user_email(self, user, email): | |
279 | uem = UserEmailMap() |
|
279 | uem = UserEmailMap() | |
280 | uem.user = user |
|
280 | uem.user = user | |
281 | uem.email = email |
|
281 | uem.email = email | |
282 | Session().add(uem) |
|
282 | Session().add(uem) | |
283 | return uem |
|
283 | return uem | |
284 |
|
284 | |||
285 | def destroy_users(self, userid_iter): |
|
285 | def destroy_users(self, userid_iter): | |
286 | for user_id in userid_iter: |
|
286 | for user_id in userid_iter: | |
287 | if User.get_by_username(user_id): |
|
287 | if User.get_by_username(user_id): | |
288 | UserModel().delete(user_id) |
|
288 | UserModel().delete(user_id) | |
289 | Session().commit() |
|
289 | Session().commit() | |
290 |
|
290 | |||
291 | def create_user_group(self, name, **kwargs): |
|
291 | def create_user_group(self, name, **kwargs): | |
292 | if 'skip_if_exists' in kwargs: |
|
292 | if 'skip_if_exists' in kwargs: | |
293 | del kwargs['skip_if_exists'] |
|
293 | del kwargs['skip_if_exists'] | |
294 | gr = UserGroup.get_by_group_name(group_name=name) |
|
294 | gr = UserGroup.get_by_group_name(group_name=name) | |
295 | if gr: |
|
295 | if gr: | |
296 | return gr |
|
296 | return gr | |
297 | # map active flag to the real attribute. For API consistency of fixtures |
|
297 | # map active flag to the real attribute. For API consistency of fixtures | |
298 | if 'active' in kwargs: |
|
298 | if 'active' in kwargs: | |
299 | kwargs['users_group_active'] = kwargs['active'] |
|
299 | kwargs['users_group_active'] = kwargs['active'] | |
300 | del kwargs['active'] |
|
300 | del kwargs['active'] | |
301 | form_data = self._get_user_group_create_params(name, **kwargs) |
|
301 | form_data = self._get_user_group_create_params(name, **kwargs) | |
302 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
302 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) | |
303 | user_group = UserGroupModel().create( |
|
303 | user_group = UserGroupModel().create( | |
304 | name=form_data['users_group_name'], |
|
304 | name=form_data['users_group_name'], | |
305 | description=form_data['user_group_description'], |
|
305 | description=form_data['user_group_description'], | |
306 | owner=owner, active=form_data['users_group_active'], |
|
306 | owner=owner, active=form_data['users_group_active'], | |
307 | group_data=form_data['user_group_data']) |
|
307 | group_data=form_data['user_group_data']) | |
308 | Session().commit() |
|
308 | Session().commit() | |
309 | user_group = UserGroup.get_by_group_name(user_group.users_group_name) |
|
309 | user_group = UserGroup.get_by_group_name(user_group.users_group_name) | |
310 | return user_group |
|
310 | return user_group | |
311 |
|
311 | |||
312 | def destroy_user_group(self, usergroupid): |
|
312 | def destroy_user_group(self, usergroupid): | |
313 | UserGroupModel().delete(user_group=usergroupid, force=True) |
|
313 | UserGroupModel().delete(user_group=usergroupid, force=True) | |
314 | Session().commit() |
|
314 | Session().commit() | |
315 |
|
315 | |||
316 | def create_gist(self, **kwargs): |
|
316 | def create_gist(self, **kwargs): | |
317 | form_data = { |
|
317 | form_data = { | |
318 | 'description': 'new-gist', |
|
318 | 'description': 'new-gist', | |
319 | 'owner': TEST_USER_ADMIN_LOGIN, |
|
319 | 'owner': TEST_USER_ADMIN_LOGIN, | |
320 | 'gist_type': GistModel.cls.GIST_PUBLIC, |
|
320 | 'gist_type': GistModel.cls.GIST_PUBLIC, | |
321 | 'lifetime': -1, |
|
321 | 'lifetime': -1, | |
322 | 'acl_level': Gist.ACL_LEVEL_PUBLIC, |
|
322 | 'acl_level': Gist.ACL_LEVEL_PUBLIC, | |
323 | 'gist_mapping': {'filename1.txt': {'content': 'hello world'},} |
|
323 | 'gist_mapping': {'filename1.txt': {'content': 'hello world'},} | |
324 | } |
|
324 | } | |
325 | form_data.update(kwargs) |
|
325 | form_data.update(kwargs) | |
326 | gist = GistModel().create( |
|
326 | gist = GistModel().create( | |
327 | description=form_data['description'], owner=form_data['owner'], |
|
327 | description=form_data['description'], owner=form_data['owner'], | |
328 | gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'], |
|
328 | gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'], | |
329 | lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level'] |
|
329 | lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level'] | |
330 | ) |
|
330 | ) | |
331 | Session().commit() |
|
331 | Session().commit() | |
332 | return gist |
|
332 | return gist | |
333 |
|
333 | |||
334 | def destroy_gists(self, gistid=None): |
|
334 | def destroy_gists(self, gistid=None): | |
335 | for g in GistModel.cls.get_all(): |
|
335 | for g in GistModel.cls.get_all(): | |
336 | if gistid: |
|
336 | if gistid: | |
337 | if gistid == g.gist_access_id: |
|
337 | if gistid == g.gist_access_id: | |
338 | GistModel().delete(g) |
|
338 | GistModel().delete(g) | |
339 | else: |
|
339 | else: | |
340 | GistModel().delete(g) |
|
340 | GistModel().delete(g) | |
341 | Session().commit() |
|
341 | Session().commit() | |
342 |
|
342 | |||
343 | def load_resource(self, resource_name, strip=False): |
|
343 | def load_resource(self, resource_name, strip=False): | |
344 | with open(os.path.join(FIXTURES, resource_name)) as f: |
|
344 | with open(os.path.join(FIXTURES, resource_name)) as f: | |
345 | source = f.read() |
|
345 | source = f.read() | |
346 | if strip: |
|
346 | if strip: | |
347 | source = source.strip() |
|
347 | source = source.strip() | |
348 |
|
348 | |||
349 | return source |
|
349 | return source |
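Note: destroy_repo() now passes pull_requests='delete' to RepoModel().delete(), so fixtures can tear down repositories that still have attached pull requests. A minimal usage sketch in test style; the repository name and assertion are assumptions, not part of the changeset::

    fixture = Fixture()

    def test_repo_create_and_cleanup():
        repo = fixture.create_repo('tmp_fixture_repo', repo_type='hg')
        try:
            assert repo.repo_name == 'tmp_fixture_repo'
        finally:
            # removes the DB record and, with the change above, any
            # pull requests attached to the repository
            fixture.destroy_repo(repo.repo_name)
            fixture.destroy_repo_on_filesystem(repo.repo_name)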