# -*- coding: utf-8 -*-

# Copyright (C) 2010-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os
import re
import shutil
import time
import logging
import traceback
import datetime

from pyramid.threadlocal import get_current_request
from zope.cachedescriptors.property import Lazy as LazyProperty

from rhodecode import events
from rhodecode.lib.auth import HasUserGroupPermissionAny
from rhodecode.lib.caching_query import FromCache
from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
from rhodecode.lib.hooks_base import log_delete_repository
from rhodecode.lib.user_log_filter import user_log_filter
from rhodecode.lib.utils import make_db_config
from rhodecode.lib.utils2 import (
    safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
    get_current_rhodecode_user, safe_int, datetime_to_time,
    action_logger_generic)
from rhodecode.lib.vcs.backends import get_backend
from rhodecode.model import BaseModel
from rhodecode.model.db import (
    _hash_key, joinedload, or_, Repository, UserRepoToPerm, UserGroupRepoToPerm,
    UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
    Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)

from rhodecode.model.settings import VcsSettingsModel


log = logging.getLogger(__name__)

class RepoModel(BaseModel):

    cls = Repository

    def _get_user_group(self, users_group):
        return self._get_instance(UserGroup, users_group,
                                  callback=UserGroup.get_by_group_name)

    def _get_repo_group(self, repo_group):
        return self._get_instance(RepoGroup, repo_group,
                                  callback=RepoGroup.get_by_group_name)

    def _create_default_perms(self, repository, private):
        # create default permission
        default = 'repository.read'
        def_user = User.get_default_user()
        for p in def_user.user_perms:
            if p.permission.permission_name.startswith('repository.'):
                default = p.permission.permission_name
                break

        default_perm = 'repository.none' if private else default

        repo_to_perm = UserRepoToPerm()
        repo_to_perm.permission = Permission.get_by_key(default_perm)

        repo_to_perm.repository = repository
        repo_to_perm.user_id = def_user.user_id

        return repo_to_perm
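
    # Illustrative sketch (editor's addition, not part of the original module):
    # a non-private repo inherits whatever 'repository.*' permission the
    # default user carries globally; a private one is forced to 'none':
    #
    #   perm = RepoModel()._create_default_perms(repo, private=False)
    #   # perm.permission.permission_name -> e.g. 'repository.read'
    #   perm = RepoModel()._create_default_perms(repo, private=True)
    #   # perm.permission.permission_name -> 'repository.none'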

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories' root path from the database
        """
        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()

    def get(self, repo_id):
        repo = self.sa.query(Repository) \
            .filter(Repository.repo_id == repo_id)

        return repo.scalar()

    def get_repo(self, repository):
        return self._get_repo(repository)

    def get_by_repo_name(self, repo_name, cache=False):
        repo = self.sa.query(Repository) \
            .filter(Repository.repo_name == repo_name)

        if cache:
            name_key = _hash_key(repo_name)
            repo = repo.options(
                FromCache("sql_cache_short", "get_repo_%s" % name_key))
        return repo.scalar()

    def _extract_id_from_repo_name(self, repo_name):
        if repo_name.startswith('/'):
            repo_name = repo_name.lstrip('/')
        by_id_match = re.match(r'^_(\d+)', repo_name)
        if by_id_match:
            return by_id_match.groups()[0]

    def get_repo_by_id(self, repo_name):
        """
        Extracts a repository by its id from special URLs.
        An example URL is _11/repo_name

        :param repo_name:
        :return: repo object if matched else None
        """

        try:
            _repo_id = self._extract_id_from_repo_name(repo_name)
            if _repo_id:
                return self.get(_repo_id)
        except Exception:
            log.exception('Failed to extract repo_name from URL')

        return None
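
    # Illustrative sketch (editor's addition, not part of the original module):
    # permalink-style names carry the repo id behind a leading underscore,
    # so '_11/some-repo' resolves to the repository with repo_id == 11:
    #
    #   repo = RepoModel().get_repo_by_id('_11/some-repo')     # -> Repository
    #   missing = RepoModel().get_repo_by_id('plain/name')     # -> None, no _<id> marker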

    def get_repos_for_root(self, root, traverse=False):
        if traverse:
            like_expression = u'{}%'.format(safe_unicode(root))
            repos = Repository.query().filter(
                Repository.repo_name.like(like_expression)).all()
        else:
            if root and not isinstance(root, RepoGroup):
                raise ValueError(
                    'Root must be an instance '
                    'of RepoGroup, got:{} instead'.format(type(root)))
            repos = Repository.query().filter(Repository.group == root).all()
        return repos

    def get_url(self, repo, request=None, permalink=False):
        if not request:
            request = get_current_request()

        if not request:
            return

        if permalink:
            return request.route_url(
                'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
        else:
            return request.route_url(
                'repo_summary', repo_name=safe_str(repo.repo_name))

    def get_commit_url(self, repo, commit_id, request=None, permalink=False):
        if not request:
            request = get_current_request()

        if not request:
            return

        if permalink:
            # prefix the id with '_' so the generated URL matches the _<id>
            # scheme that _extract_id_from_repo_name() resolves
            return request.route_url(
                'repo_commit', repo_name='_{}'.format(safe_str(repo.repo_id)),
                commit_id=commit_id)
        else:
            return request.route_url(
                'repo_commit', repo_name=safe_str(repo.repo_name),
                commit_id=commit_id)
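
    # Illustrative sketch (editor's addition, not part of the original module):
    # permalink URLs are built from '_<repo_id>' and therefore survive renames:
    #
    #   RepoModel().get_url(repo)                    # 'repo_summary' route by name
    #   RepoModel().get_url(repo, permalink=True)    # same route, '_<id>' form
    #   RepoModel().get_commit_url(repo, commit_id='abcd1234', permalink=True)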

    def get_repo_log(self, repo, filter_term):
        repo_log = UserLog.query()\
            .filter(or_(UserLog.repository_id == repo.repo_id,
                        UserLog.repository_name == repo.repo_name))\
            .options(joinedload(UserLog.user))\
            .options(joinedload(UserLog.repository))\
            .order_by(UserLog.action_date.desc())

        repo_log = user_log_filter(repo_log, filter_term)
        return repo_log

    @classmethod
    def update_commit_cache(cls, repositories=None):
        if not repositories:
            repositories = Repository.getAll()
        for repo in repositories:
            repo.update_commit_cache()

    def get_repos_as_dict(self, repo_list=None, admin=False,
                          super_user_actions=False, short_name=None):
        _render = get_current_request().get_partial_renderer(
            'rhodecode:templates/data_table/_dt_elements.mako')
        c = _render.get_call_context()

        def quick_menu(repo_name):
            return _render('quick_menu', repo_name)

        def repo_lnk(name, rtype, rstate, private, archived, fork_of):
            if short_name is not None:
                short_name_var = short_name
            else:
                short_name_var = not admin
            return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
                           short_name=short_name_var, admin=False)

        def last_change(last_change):
            if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
                # shift naive timestamps by the local UTC offset for admin views
                utc_offset = (datetime.datetime.now()
                              - datetime.datetime.utcnow()).seconds
                last_change = last_change + datetime.timedelta(seconds=utc_offset)
            return _render("last_change", last_change)

        def rss_lnk(repo_name):
            return _render("rss", repo_name)

        def atom_lnk(repo_name):
            return _render("atom", repo_name)

        def last_rev(repo_name, cs_cache):
            return _render('revision', repo_name, cs_cache.get('revision'),
                           cs_cache.get('raw_id'), cs_cache.get('author'),
                           cs_cache.get('message'), cs_cache.get('date'))

        def desc(desc):
            return _render('repo_desc', desc, c.visual.stylify_metatags)

        def state(repo_state):
            return _render("repo_state", repo_state)

        def repo_actions(repo_name):
            return _render('repo_actions', repo_name, super_user_actions)

        def user_profile(username):
            return _render('user_profile', username)

        repos_data = []
        for repo in repo_list:
            cs_cache = repo.changeset_cache
            row = {
                "menu": quick_menu(repo.repo_name),

                "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
                                 repo.private, repo.archived, repo.fork),
                "name_raw": repo.repo_name.lower(),

                "last_change": last_change(repo.last_commit_change),
                "last_change_raw": datetime_to_time(repo.last_commit_change),

                "last_changeset": last_rev(repo.repo_name, cs_cache),
                "last_changeset_raw": cs_cache.get('revision'),

                "desc": desc(repo.description_safe),
                "owner": user_profile(repo.user.username),

                "state": state(repo.repo_state),
                "rss": rss_lnk(repo.repo_name),

                "atom": atom_lnk(repo.repo_name),
            }
            if admin:
                row.update({
                    "action": repo_actions(repo.repo_name),
                })
            repos_data.append(row)

        return repos_data
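
    # Illustrative sketch (editor's addition, not part of the original module):
    # each row mixes raw sort keys with pre-rendered HTML cells, e.g.:
    #
    #   rows = RepoModel().get_repos_as_dict(repo_list=[repo], admin=True)
    #   rows[0]['name_raw']   # plain lowercase name, used for sorting
    #   rows[0]['name']       # rendered HTML link from _dt_elements.mako
    #   rows[0]['action']     # present only because admin=True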

    def _get_defaults(self, repo_name):
        """
        Gets information about a repository and returns a dict suitable
        for usage in forms

        :param repo_name:
        """

        repo_info = Repository.get_by_repo_name(repo_name)

        if repo_info is None:
            return None

        defaults = repo_info.get_dict()
        defaults['repo_name'] = repo_info.just_name

        groups = repo_info.groups_with_parents
        parent_group = groups[-1] if groups else None

        # we use -1 because that is how an empty group is marked in the HTML form
        defaults['repo_group'] = getattr(parent_group, 'group_id', -1)

        keys_to_process = (
            {'k': 'repo_type', 'strip': False},
            {'k': 'repo_enable_downloads', 'strip': True},
            {'k': 'repo_description', 'strip': True},
            {'k': 'repo_enable_locking', 'strip': True},
            {'k': 'repo_landing_rev', 'strip': True},
            {'k': 'clone_uri', 'strip': False},
            {'k': 'push_uri', 'strip': False},
            {'k': 'repo_private', 'strip': True},
            {'k': 'repo_enable_statistics', 'strip': True}
        )

        for item in keys_to_process:
            attr = item['k']
            if item['strip']:
                attr = remove_prefix(item['k'], 'repo_')

            val = defaults[attr]
            if item['k'] == 'repo_landing_rev':
                val = ':'.join(defaults[attr])
            defaults[item['k']] = val
            if item['k'] == 'clone_uri':
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
            if item['k'] == 'push_uri':
                defaults['push_uri_hidden'] = repo_info.push_uri_hidden

        # fill owner
        if repo_info.user:
            defaults.update({'user': repo_info.user.username})
        else:
            replacement_user = User.get_first_super_admin().username
            defaults.update({'user': replacement_user})

        return defaults
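
    # Illustrative sketch (editor's addition, not part of the original module):
    # the 'strip' flag maps a form key back onto the db attribute it mirrors;
    # for {'k': 'repo_description', 'strip': True} the loop reads the db
    # column 'description' and re-exposes it under the form key:
    #
    #   defaults = RepoModel()._get_defaults('some/repo')
    #   defaults['repo_description'] == defaults['description']  # copied by the loop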

    def update(self, repo, **kwargs):
        try:
            cur_repo = self._get_repo(repo)
            source_repo_name = cur_repo.repo_name
            if 'user' in kwargs:
                cur_repo.user = User.get_by_username(kwargs['user'])

            if 'repo_group' in kwargs:
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)

            update_keys = [
                (1, 'repo_description'),
                (1, 'repo_landing_rev'),
                (1, 'repo_private'),
                (1, 'repo_enable_downloads'),
                (1, 'repo_enable_locking'),
                (1, 'repo_enable_statistics'),
                (0, 'clone_uri'),
                (0, 'push_uri'),
                (0, 'fork_id')
            ]
            for strip, k in update_keys:
                if k in kwargs:
                    val = kwargs[k]
                    if strip:
                        k = remove_prefix(k, 'repo_')

                    setattr(cur_repo, k, val)

            new_name = cur_repo.get_new_name(kwargs['repo_name'])
            cur_repo.repo_name = new_name

            # if private flag is set, reset default permission to NONE
            if kwargs.get('repo_private'):
                EMPTY_PERM = 'repository.none'
                RepoModel().grant_user_permission(
                    repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
                )

            # handle extra fields
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
                k = RepositoryField.un_prefix_key(field)
                ex_field = RepositoryField.get_by_key_name(
                    key=k, repo=cur_repo)
                if ex_field:
                    ex_field.field_value = kwargs[field]
                    self.sa.add(ex_field)
            cur_repo.updated_on = datetime.datetime.now()
            self.sa.add(cur_repo)

            if source_repo_name != new_name:
                # rename repository
                self._rename_filesystem_repo(
                    old=source_repo_name, new=new_name)

            return cur_repo
        except Exception:
            log.error(traceback.format_exc())
            raise

    def _create_repo(self, repo_name, repo_type, description, owner,
                     private=False, clone_uri=None, repo_group=None,
                     landing_rev='rev:tip', fork_of=None,
                     copy_fork_permissions=False, enable_statistics=False,
                     enable_locking=False, enable_downloads=False,
                     copy_group_permissions=False,
                     state=Repository.STATE_PENDING):
        """
        Create a repository inside the database with PENDING state; this
        should only be executed by create(), with the exception of
        importing existing repos
        """
        from rhodecode.model.scm import ScmModel

        owner = self._get_user(owner)
        fork_of = self._get_repo(fork_of)
        repo_group = self._get_repo_group(safe_int(repo_group))

        try:
            repo_name = safe_unicode(repo_name)
            description = safe_unicode(description)
            # repo_name is just the name of the repository, while
            # repo_name_full is the fully qualified name combined from the
            # name and the path of its group
            repo_name_full = repo_name
            repo_name = repo_name.split(Repository.NAME_SEP)[-1]

            new_repo = Repository()
            new_repo.repo_state = state
            new_repo.enable_statistics = False
            new_repo.repo_name = repo_name_full
            new_repo.repo_type = repo_type
            new_repo.user = owner
            new_repo.group = repo_group
            new_repo.description = description or repo_name
            new_repo.private = private
            new_repo.archived = False
            new_repo.clone_uri = clone_uri
            new_repo.landing_rev = landing_rev

            new_repo.enable_statistics = enable_statistics
            new_repo.enable_locking = enable_locking
            new_repo.enable_downloads = enable_downloads

            if repo_group:
                new_repo.enable_locking = repo_group.enable_locking

            if fork_of:
                parent_repo = fork_of
                new_repo.fork = parent_repo

            events.trigger(events.RepoPreCreateEvent(new_repo))

            self.sa.add(new_repo)

            EMPTY_PERM = 'repository.none'
            if fork_of and copy_fork_permissions:
                repo = fork_of
                user_perms = UserRepoToPerm.query() \
                    .filter(UserRepoToPerm.repository == repo).all()
                group_perms = UserGroupRepoToPerm.query() \
                    .filter(UserGroupRepoToPerm.repository == repo).all()

                for perm in user_perms:
                    UserRepoToPerm.create(
                        perm.user, new_repo, perm.permission)

                for perm in group_perms:
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm.permission)
                # in case we copy permissions and also set this repo to private
                # override the default user permission to make it a private repo
                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            elif repo_group and copy_group_permissions:
                user_perms = UserRepoGroupToPerm.query() \
                    .filter(UserRepoGroupToPerm.group == repo_group).all()

                group_perms = UserGroupRepoGroupToPerm.query() \
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()

                for perm in user_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)

                for perm in group_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)

                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            else:
                perm_obj = self._create_default_perms(new_repo, private)
                self.sa.add(perm_obj)

            # now automatically start following this repository as owner
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)

            # we need to flush here in order to check that the database won't
            # throw any exceptions; filesystem dirs are created at the very end
            self.sa.flush()
            events.trigger(events.RepoCreateEvent(new_repo))
            return new_repo

        except Exception:
            log.error(traceback.format_exc())
            raise

    def create(self, form_data, cur_user):
        """
        Create a repository using celery tasks

        :param form_data:
        :param cur_user:
        """
        from rhodecode.lib.celerylib import tasks, run_task
        return run_task(tasks.create_repo, form_data, cur_user)

    def update_permissions(self, repo, perm_additions=None, perm_updates=None,
                           perm_deletions=None, check_perms=True,
                           cur_user=None):
        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        changes = {
            'added': [],
            'updated': [],
            'deleted': []
        }
        # update permissions
        for member_id, perm, member_type in perm_updates:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                # this also updates the current one if found
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group', "
                                 "got {} instead".format(member_type))
            changes['updated'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})

        # set new permissions
        for member_id, perm, member_type in perm_additions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group', "
                                 "got {} instead".format(member_type))

            changes['added'].append({'type': member_type, 'id': member_id,
                                     'name': member_name, 'new_perm': perm})
        # delete permissions
        for member_id, perm, member_type in perm_deletions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.revoke_user_permission(repo=repo, user=member_id)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.revoke_user_group_permission(
                        repo=repo, group_name=member_id)
            else:
                raise ValueError("member_type must be 'user' or 'user_group', "
                                 "got {} instead".format(member_type))

            changes['deleted'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})
        return changes
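
    # Illustrative sketch (editor's addition, not part of the original module):
    # each entry is a (member_id, permission_name, member_type) triple, e.g.:
    #
    #   changes = RepoModel().update_permissions(
    #       repo,
    #       perm_additions=[(user_id, 'repository.write', 'user')],
    #       perm_deletions=[(group_id, None, 'user_group')],
    #       cur_user=admin_user)
    #   # -> {'added': [...], 'updated': [], 'deleted': [...]}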

    def create_fork(self, form_data, cur_user):
        """
        Simple wrapper for executing the celery task that creates a fork

        :param form_data:
        :param cur_user:
        """
        from rhodecode.lib.celerylib import tasks, run_task
        return run_task(tasks.create_repo_fork, form_data, cur_user)

    def archive(self, repo):
        """
        Archive the given repository by setting its archived flag.

        :param repo:
        """
        repo = self._get_repo(repo)
        if repo:
            try:
                repo.archived = True
                self.sa.add(repo)
                self.sa.commit()
            except Exception:
                log.error(traceback.format_exc())
                raise

    def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
        """
        Delete the given repository; the forks parameter defines what to do
        with attached forks. Throws AttachedForksError if the deleted repo
        has attached forks

        :param repo:
        :param forks: str 'delete' or 'detach'
        :param pull_requests: str 'delete' or None
        :param fs_remove: remove(archive) repo from filesystem
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)
        repo = self._get_repo(repo)
        if repo:
            if forks == 'detach':
                for r in repo.forks:
                    r.fork = None
                    self.sa.add(r)
            elif forks == 'delete':
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                raise AttachedForksError()

            # check for pull requests
            pr_sources = repo.pull_requests_source
            pr_targets = repo.pull_requests_target
            if pull_requests != 'delete' and (pr_sources or pr_targets):
                raise AttachedPullRequestsError()

            old_repo_dict = repo.get_dict()
            events.trigger(events.RepoPreDeleteEvent(repo))
            try:
                self.sa.delete(repo)
                if fs_remove:
                    self._delete_filesystem_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                old_repo_dict.update({
                    'deleted_by': cur_user,
                    'deleted_on': time.time(),
                })
                log_delete_repository(**old_repo_dict)
                events.trigger(events.RepoDeleteEvent(repo))
            except Exception:
                log.error(traceback.format_exc())
                raise
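
    # Illustrative sketch (editor's addition, not part of the original module):
    #
    #   model = RepoModel()
    #   model.delete(repo, forks='detach')   # keep forks, unlink them from the parent
    #   model.delete(repo, forks='delete')   # cascade-delete all forks as well
    #   model.delete(repo)                   # raises AttachedForksError if forks exist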

    def grant_user_permission(self, repo, user, perm):
        """
        Grant permission for user on given repository, or update existing one
        if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        :param perm: Instance of Permission, or permission_name
        """
        user = self._get_user(user)
        repo = self._get_repo(repo)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserRepoToPerm) \
            .filter(UserRepoToPerm.user == user) \
            .filter(UserRepoToPerm.repository == repo) \
            .scalar()
        if obj is None:
            # create a new one
            obj = UserRepoToPerm()
        obj.repository = repo
        obj.user = user
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, user, repo)
        action_logger_generic(
            'granted permission: {} to user: {} on repo: {}'.format(
                perm, user, repo), namespace='security.repo')
        return obj

    def revoke_user_permission(self, repo, user):
        """
        Revoke permission for user on given repository

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        """

        user = self._get_user(user)
        repo = self._get_repo(repo)

        obj = self.sa.query(UserRepoToPerm) \
            .filter(UserRepoToPerm.repository == repo) \
            .filter(UserRepoToPerm.user == user) \
            .scalar()
        if obj:
            self.sa.delete(obj)
            log.debug('Revoked perm on %s on %s', repo, user)
            action_logger_generic(
                'revoked permission from user: {} on repo: {}'.format(
                    user, repo), namespace='security.repo')

731 | def grant_user_group_permission(self, repo, group_name, perm): |
|
731 | def grant_user_group_permission(self, repo, group_name, perm): | |
732 | """ |
|
732 | """ | |
733 | Grant permission for user group on given repository, or update |
|
733 | Grant permission for user group on given repository, or update | |
734 | existing one if found |
|
734 | existing one if found | |
735 |
|
735 | |||
736 | :param repo: Instance of Repository, repository_id, or repository name |
|
736 | :param repo: Instance of Repository, repository_id, or repository name | |
737 | :param group_name: Instance of UserGroup, users_group_id, |
|
737 | :param group_name: Instance of UserGroup, users_group_id, | |
738 | or user group name |
|
738 | or user group name | |
739 | :param perm: Instance of Permission, or permission_name |
|
739 | :param perm: Instance of Permission, or permission_name | |
740 | """ |
|
740 | """ | |
741 | repo = self._get_repo(repo) |
|
741 | repo = self._get_repo(repo) | |
742 | group_name = self._get_user_group(group_name) |
|
742 | group_name = self._get_user_group(group_name) | |
743 | permission = self._get_perm(perm) |
|
743 | permission = self._get_perm(perm) | |
744 |
|
744 | |||
745 | # check if we have that permission already |
|
745 | # check if we have that permission already | |
746 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
746 | obj = self.sa.query(UserGroupRepoToPerm) \ | |
747 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
747 | .filter(UserGroupRepoToPerm.users_group == group_name) \ | |
748 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
748 | .filter(UserGroupRepoToPerm.repository == repo) \ | |
749 | .scalar() |
|
749 | .scalar() | |
750 |
|
750 | |||
751 | if obj is None: |
|
751 | if obj is None: | |
752 | # create new |
|
752 | # create new | |
753 | obj = UserGroupRepoToPerm() |
|
753 | obj = UserGroupRepoToPerm() | |
754 |
|
754 | |||
755 | obj.repository = repo |
|
755 | obj.repository = repo | |
756 | obj.users_group = group_name |
|
756 | obj.users_group = group_name | |
757 | obj.permission = permission |
|
757 | obj.permission = permission | |
758 | self.sa.add(obj) |
|
758 | self.sa.add(obj) | |
759 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) |
|
759 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) | |
760 | action_logger_generic( |
|
760 | action_logger_generic( | |
761 | 'granted permission: {} to usergroup: {} on repo: {}'.format( |
|
761 | 'granted permission: {} to usergroup: {} on repo: {}'.format( | |
762 | perm, group_name, repo), namespace='security.repo') |
|
762 | perm, group_name, repo), namespace='security.repo') | |
763 |
|
763 | |||
764 | return obj |
|
764 | return obj | |
765 |
|
765 | |||
766 | def revoke_user_group_permission(self, repo, group_name): |
|
766 | def revoke_user_group_permission(self, repo, group_name): | |
767 | """ |
|
767 | """ | |
768 | Revoke permission for user group on given repository |
|
768 | Revoke permission for user group on given repository | |
769 |
|
769 | |||
770 | :param repo: Instance of Repository, repository_id, or repository name |
|
770 | :param repo: Instance of Repository, repository_id, or repository name | |
771 | :param group_name: Instance of UserGroup, users_group_id, |
|
771 | :param group_name: Instance of UserGroup, users_group_id, | |
772 | or user group name |
|
772 | or user group name | |
773 | """ |
|
773 | """ | |
774 | repo = self._get_repo(repo) |
|
774 | repo = self._get_repo(repo) | |
775 | group_name = self._get_user_group(group_name) |
|
775 | group_name = self._get_user_group(group_name) | |
776 |
|
776 | |||
777 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
777 | obj = self.sa.query(UserGroupRepoToPerm) \ | |
778 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
778 | .filter(UserGroupRepoToPerm.repository == repo) \ | |
779 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
779 | .filter(UserGroupRepoToPerm.users_group == group_name) \ | |
780 | .scalar() |
|
780 | .scalar() | |
781 | if obj: |
|
781 | if obj: | |
782 | self.sa.delete(obj) |
|
782 | self.sa.delete(obj) | |
783 | log.debug('Revoked perm to %s on %s', repo, group_name) |
|
783 | log.debug('Revoked perm to %s on %s', repo, group_name) | |
784 | action_logger_generic( |
|
784 | action_logger_generic( | |
785 | 'revoked permission from usergroup: {} on repo: {}'.format( |
|
785 | 'revoked permission from usergroup: {} on repo: {}'.format( | |
786 | group_name, repo), namespace='security.repo') |
|
786 | group_name, repo), namespace='security.repo') | |
787 |
|
787 | |||
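
The user-group variants follow the same pattern; a hedged sketch reusing the
hypothetical `model` from the example above:

    model.grant_user_group_permission('some/repo', 'qa-team', 'repository.read')
    model.revoke_user_group_permission('some/repo', 'qa-team')
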
788 | def delete_stats(self, repo_name): |
|
788 | def delete_stats(self, repo_name): | |
789 | """ |
|
789 | """ | |
790 | removes stats for given repo |
|
790 | removes stats for given repo | |
791 |
|
791 | |||
792 | :param repo_name: |
|
792 | :param repo_name: | |
793 | """ |
|
793 | """ | |
794 | repo = self._get_repo(repo_name) |
|
794 | repo = self._get_repo(repo_name) | |
795 | try: |
|
795 | try: | |
796 | obj = self.sa.query(Statistics) \ |
|
796 | obj = self.sa.query(Statistics) \ | |
797 | .filter(Statistics.repository == repo).scalar() |
|
797 | .filter(Statistics.repository == repo).scalar() | |
798 | if obj: |
|
798 | if obj: | |
799 | self.sa.delete(obj) |
|
799 | self.sa.delete(obj) | |
800 | except Exception: |
|
800 | except Exception: | |
801 | log.error(traceback.format_exc()) |
|
801 | log.error(traceback.format_exc()) | |
802 | raise |
|
802 | raise | |
803 |
|
803 | |||
804 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', |
|
804 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', | |
805 | field_type='str', field_desc=''): |
|
805 | field_type='str', field_desc=''): | |
806 |
|
806 | |||
807 | repo = self._get_repo(repo_name) |
|
807 | repo = self._get_repo(repo_name) | |
808 |
|
808 | |||
809 | new_field = RepositoryField() |
|
809 | new_field = RepositoryField() | |
810 | new_field.repository = repo |
|
810 | new_field.repository = repo | |
811 | new_field.field_key = field_key |
|
811 | new_field.field_key = field_key | |
812 | new_field.field_type = field_type # python type |
|
812 | new_field.field_type = field_type # python type | |
813 | new_field.field_value = field_value |
|
813 | new_field.field_value = field_value | |
814 | new_field.field_desc = field_desc |
|
814 | new_field.field_desc = field_desc | |
815 | new_field.field_label = field_label |
|
815 | new_field.field_label = field_label | |
816 | self.sa.add(new_field) |
|
816 | self.sa.add(new_field) | |
817 | return new_field |
|
817 | return new_field | |
818 |
|
818 | |||
819 | def delete_repo_field(self, repo_name, field_key): |
|
819 | def delete_repo_field(self, repo_name, field_key): | |
820 | repo = self._get_repo(repo_name) |
|
820 | repo = self._get_repo(repo_name) | |
821 | field = RepositoryField.get_by_key_name(field_key, repo) |
|
821 | field = RepositoryField.get_by_key_name(field_key, repo) | |
822 | if field: |
|
822 | if field: | |
823 | self.sa.delete(field) |
|
823 | self.sa.delete(field) | |
824 |
|
824 | |||
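
A short, illustrative sketch of the custom-field helpers above, again using the
hypothetical `model` (field values are made up):

    field = model.add_repo_field(
        'some/repo', field_key='ticket_url', field_label='Ticket URL',
        field_value='https://tracker.example.com', field_type='str',
        field_desc='Issue tracker for this repo')
    model.delete_repo_field('some/repo', field_key='ticket_url')
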
825 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, |
|
825 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, | |
826 | clone_uri=None, repo_store_location=None, |
|
826 | clone_uri=None, repo_store_location=None, | |
827 | use_global_config=False): |
|
827 | use_global_config=False): | |
828 | """ |
|
828 | """ | |
829 | makes repository on filesystem. It's group aware, meaning it'll create |
|
829 | makes repository on filesystem. It's group aware, meaning it'll create | |
830 | a repository within a group, and alter the paths according to the |
|
830 | a repository within a group, and alter the paths according to the | |
831 | group location |
|
831 | group location | |
832 |
|
832 | |||
833 | :param repo_name: |
|
833 | :param repo_name: | |
834 | :param alias: |
|
834 | :param alias: | |
835 | :param parent: |
|
835 | :param parent: | |
836 | :param clone_uri: |
|
836 | :param clone_uri: | |
837 | :param repo_store_location: |
|
837 | :param repo_store_location: | |
838 | """ |
|
838 | """ | |
839 | from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group |
|
839 | from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group | |
840 | from rhodecode.model.scm import ScmModel |
|
840 | from rhodecode.model.scm import ScmModel | |
841 |
|
841 | |||
842 | if Repository.NAME_SEP in repo_name: |
|
842 | if Repository.NAME_SEP in repo_name: | |
843 | raise ValueError( |
|
843 | raise ValueError( | |
844 | 'repo_name must not contain groups, got `%s`' % repo_name) |
|
844 | 'repo_name must not contain groups, got `%s`' % repo_name) | |
845 |
|
845 | |||
846 | if isinstance(repo_group, RepoGroup): |
|
846 | if isinstance(repo_group, RepoGroup): | |
847 | new_parent_path = os.sep.join(repo_group.full_path_splitted) |
|
847 | new_parent_path = os.sep.join(repo_group.full_path_splitted) | |
848 | else: |
|
848 | else: | |
849 | new_parent_path = repo_group or '' |
|
849 | new_parent_path = repo_group or '' | |
850 |
|
850 | |||
851 | if repo_store_location: |
|
851 | if repo_store_location: | |
852 | _paths = [repo_store_location] |
|
852 | _paths = [repo_store_location] | |
853 | else: |
|
853 | else: | |
854 | _paths = [self.repos_path, new_parent_path, repo_name] |
|
854 | _paths = [self.repos_path, new_parent_path, repo_name] | |
855 | # we need to make it str for mercurial |
|
855 | # we need to make it str for mercurial | |
856 | repo_path = os.path.join(*map(lambda x: safe_str(x), _paths)) |
|
856 | repo_path = os.path.join(*map(lambda x: safe_str(x), _paths)) | |
857 |
|
857 | |||
858 | # check if this path is not a repository |
|
858 | # check if this path is not a repository | |
859 | if is_valid_repo(repo_path, self.repos_path): |
|
859 | if is_valid_repo(repo_path, self.repos_path): | |
860 | raise Exception('This path %s is a valid repository' % repo_path) |
|
860 | raise Exception('This path %s is a valid repository' % repo_path) | |
861 |
|
861 | |||
862 | # check if this path is a group |
|
862 | # check if this path is a group | |
863 | if is_valid_repo_group(repo_path, self.repos_path): |
|
863 | if is_valid_repo_group(repo_path, self.repos_path): | |
864 | raise Exception('This path %s is a valid group' % repo_path) |
|
864 | raise Exception('This path %s is a valid group' % repo_path) | |
865 |
|
865 | |||
866 | log.info('creating repo %s in %s from url: `%s`', |
|
866 | log.info('creating repo %s in %s from url: `%s`', | |
867 | repo_name, safe_unicode(repo_path), |
|
867 | repo_name, safe_unicode(repo_path), | |
868 | obfuscate_url_pw(clone_uri)) |
|
868 | obfuscate_url_pw(clone_uri)) | |
869 |
|
869 | |||
870 | backend = get_backend(repo_type) |
|
870 | backend = get_backend(repo_type) | |
871 |
|
871 | |||
872 | config_repo = None if use_global_config else repo_name |
|
872 | config_repo = None if use_global_config else repo_name | |
873 | if config_repo and new_parent_path: |
|
873 | if config_repo and new_parent_path: | |
874 | config_repo = Repository.NAME_SEP.join( |
|
874 | config_repo = Repository.NAME_SEP.join( | |
875 | (new_parent_path, config_repo)) |
|
875 | (new_parent_path, config_repo)) | |
876 | config = make_db_config(clear_session=False, repo=config_repo) |
|
876 | config = make_db_config(clear_session=False, repo=config_repo) | |
877 | config.set('extensions', 'largefiles', '') |
|
877 | config.set('extensions', 'largefiles', '') | |
878 |
|
878 | |||
879 | # patch and reset hooks section of UI config to not run any |
|
879 | # patch and reset hooks section of UI config to not run any | |
880 | # hooks on creating remote repo |
|
880 | # hooks on creating remote repo | |
881 | config.clear_section('hooks') |
|
881 | config.clear_section('hooks') | |
882 |
|
882 | |||
883 | # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice |
|
883 | # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice | |
884 | if repo_type == 'git': |
|
884 | if repo_type == 'git': | |
885 | repo = backend( |
|
885 | repo = backend( | |
886 | repo_path, config=config, create=True, src_url=clone_uri, |
|
886 | repo_path, config=config, create=True, src_url=clone_uri, bare=True, | |
887 | bare=True) |
|
887 | with_wire={"cache": False}) | |
888 | else: |
|
888 | else: | |
889 | repo = backend( |
|
889 | repo = backend( | |
890 | repo_path, config=config, create=True, src_url=clone_uri) |
|
890 | repo_path, config=config, create=True, src_url=clone_uri, | |
|
891 | with_wire={"cache": False}) | |||
891 |
|
892 | |||
892 | repo.install_hooks() |
|
893 | repo.install_hooks() | |
893 |
|
894 | |||
894 | log.debug('Created repo %s with %s backend', |
|
895 | log.debug('Created repo %s with %s backend', | |
895 | safe_unicode(repo_name), safe_unicode(repo_type)) |
|
896 | safe_unicode(repo_name), safe_unicode(repo_type)) | |
896 | return repo |
|
897 | return repo | |
897 |
|
898 | |||
898 | def _rename_filesystem_repo(self, old, new): |
|
899 | def _rename_filesystem_repo(self, old, new): | |
899 | """ |
|
900 | """ | |
900 | renames repository on filesystem |
|
901 | renames repository on filesystem | |
901 |
|
902 | |||
902 | :param old: old name |
|
903 | :param old: old name | |
903 | :param new: new name |
|
904 | :param new: new name | |
904 | """ |
|
905 | """ | |
905 | log.info('renaming repo from %s to %s', old, new) |
|
906 | log.info('renaming repo from %s to %s', old, new) | |
906 |
|
907 | |||
907 | old_path = os.path.join(self.repos_path, old) |
|
908 | old_path = os.path.join(self.repos_path, old) | |
908 | new_path = os.path.join(self.repos_path, new) |
|
909 | new_path = os.path.join(self.repos_path, new) | |
909 | if os.path.isdir(new_path): |
|
910 | if os.path.isdir(new_path): | |
910 | raise Exception( |
|
911 | raise Exception( | |
911 | 'Was trying to rename to an already existing dir %s' % new_path |
|
912 | 'Was trying to rename to an already existing dir %s' % new_path | |
912 | ) |
|
913 | ) | |
913 | shutil.move(old_path, new_path) |
|
914 | shutil.move(old_path, new_path) | |
914 |
|
915 | |||
915 | def _delete_filesystem_repo(self, repo): |
|
916 | def _delete_filesystem_repo(self, repo): | |
916 | """ |
|
917 | """ | |
917 | removes repo from filesystem, the removal is actually made by |
|
918 | removes repo from filesystem, the removal is actually made by | |
918 | adding a rm__ prefix to the dir, and renaming internal .hg/.git dirs so this |
|
919 | adding a rm__ prefix to the dir, and renaming internal .hg/.git dirs so this | |
919 | repository is no longer valid for rhodecode; it can be undeleted later on |
|
920 | repository is no longer valid for rhodecode; it can be undeleted later on | |
920 | by reverting the renames on this repository |
|
921 | by reverting the renames on this repository | |
921 |
|
922 | |||
922 | :param repo: repo object |
|
923 | :param repo: repo object | |
923 | """ |
|
924 | """ | |
924 | rm_path = os.path.join(self.repos_path, repo.repo_name) |
|
925 | rm_path = os.path.join(self.repos_path, repo.repo_name) | |
925 | repo_group = repo.group |
|
926 | repo_group = repo.group | |
926 | log.info("Removing repository %s", rm_path) |
|
927 | log.info("Removing repository %s", rm_path) | |
927 | # disable hg/git internals so that it doesn't get detected as a repo |
|
928 | # disable hg/git internals so that it doesn't get detected as a repo | |
928 | alias = repo.repo_type |
|
929 | alias = repo.repo_type | |
929 |
|
930 | |||
930 | config = make_db_config(clear_session=False) |
|
931 | config = make_db_config(clear_session=False) | |
931 | config.set('extensions', 'largefiles', '') |
|
932 | config.set('extensions', 'largefiles', '') | |
932 | bare = getattr(repo.scm_instance(config=config), 'bare', False) |
|
933 | bare = getattr(repo.scm_instance(config=config), 'bare', False) | |
933 |
|
934 | |||
934 | # skip this for bare git repos |
|
935 | # skip this for bare git repos | |
935 | if not bare: |
|
936 | if not bare: | |
936 | # disable VCS repo |
|
937 | # disable VCS repo | |
937 | vcs_path = os.path.join(rm_path, '.%s' % alias) |
|
938 | vcs_path = os.path.join(rm_path, '.%s' % alias) | |
938 | if os.path.exists(vcs_path): |
|
939 | if os.path.exists(vcs_path): | |
939 | shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias)) |
|
940 | shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias)) | |
940 |
|
941 | |||
941 | _now = datetime.datetime.now() |
|
942 | _now = datetime.datetime.now() | |
942 | _ms = str(_now.microsecond).rjust(6, '0') |
|
943 | _ms = str(_now.microsecond).rjust(6, '0') | |
943 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), |
|
944 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), | |
944 | repo.just_name) |
|
945 | repo.just_name) | |
945 | if repo_group: |
|
946 | if repo_group: | |
946 | # if repository is in group, prefix the removal path with the group |
|
947 | # if repository is in group, prefix the removal path with the group | |
947 | args = repo_group.full_path_splitted + [_d] |
|
948 | args = repo_group.full_path_splitted + [_d] | |
948 | _d = os.path.join(*args) |
|
949 | _d = os.path.join(*args) | |
949 |
|
950 | |||
950 | if os.path.isdir(rm_path): |
|
951 | if os.path.isdir(rm_path): | |
951 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) |
|
952 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) | |
952 |
|
953 | |||
953 | # finally cleanup diff-cache if it exists |
|
954 | # finally cleanup diff-cache if it exists | |
954 | cached_diffs_dir = repo.cached_diffs_dir |
|
955 | cached_diffs_dir = repo.cached_diffs_dir | |
955 | if os.path.isdir(cached_diffs_dir): |
|
956 | if os.path.isdir(cached_diffs_dir): | |
956 | shutil.rmtree(cached_diffs_dir) |
|
957 | shutil.rmtree(cached_diffs_dir) | |
957 |
|
958 | |||
958 |
|
959 | |||
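
A worked example of the rm__ naming scheme used above (values illustrative):

    import datetime
    _now = datetime.datetime(2019, 5, 1, 12, 30, 45, 123456)
    _ms = str(_now.microsecond).rjust(6, '0')
    _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), 'myrepo')
    # _d == 'rm__20190501_123045_123456__myrepo'; undeleting means moving this
    # directory back and reverting the rm__.hg / rm__.git rename
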
959 | class ReadmeFinder: |
|
960 | class ReadmeFinder: | |
960 | """ |
|
961 | """ | |
961 | Utility which knows how to find a readme for a specific commit. |
|
962 | Utility which knows how to find a readme for a specific commit. | |
962 |
|
963 | |||
963 | The main idea is that this is a configurable algorithm. When creating an |
|
964 | The main idea is that this is a configurable algorithm. When creating an | |
964 | instance you can define parameters, currently only the `default_renderer`. |
|
965 | instance you can define parameters, currently only the `default_renderer`. | |
965 | Based on this configuration the method :meth:`search` behaves slightly |
|
966 | Based on this configuration the method :meth:`search` behaves slightly | |
966 | differently. |
|
967 | differently. | |
967 | """ |
|
968 | """ | |
968 |
|
969 | |||
969 | readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE) |
|
970 | readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE) | |
970 | path_re = re.compile(r'^docs?', re.IGNORECASE) |
|
971 | path_re = re.compile(r'^docs?', re.IGNORECASE) | |
971 |
|
972 | |||
972 | default_priorities = { |
|
973 | default_priorities = { | |
973 | None: 0, |
|
974 | None: 0, | |
974 | '.text': 2, |
|
975 | '.text': 2, | |
975 | '.txt': 3, |
|
976 | '.txt': 3, | |
976 | '.rst': 1, |
|
977 | '.rst': 1, | |
977 | '.rest': 2, |
|
978 | '.rest': 2, | |
978 | '.md': 1, |
|
979 | '.md': 1, | |
979 | '.mkdn': 2, |
|
980 | '.mkdn': 2, | |
980 | '.mdown': 3, |
|
981 | '.mdown': 3, | |
981 | '.markdown': 4, |
|
982 | '.markdown': 4, | |
982 | } |
|
983 | } | |
983 |
|
984 | |||
984 | path_priority = { |
|
985 | path_priority = { | |
985 | 'doc': 0, |
|
986 | 'doc': 0, | |
986 | 'docs': 1, |
|
987 | 'docs': 1, | |
987 | } |
|
988 | } | |
988 |
|
989 | |||
989 | FALLBACK_PRIORITY = 99 |
|
990 | FALLBACK_PRIORITY = 99 | |
990 |
|
991 | |||
991 | RENDERER_TO_EXTENSION = { |
|
992 | RENDERER_TO_EXTENSION = { | |
992 | 'rst': ['.rst', '.rest'], |
|
993 | 'rst': ['.rst', '.rest'], | |
993 | 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'], |
|
994 | 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'], | |
994 | } |
|
995 | } | |
995 |
|
996 | |||
996 | def __init__(self, default_renderer=None): |
|
997 | def __init__(self, default_renderer=None): | |
997 | self._default_renderer = default_renderer |
|
998 | self._default_renderer = default_renderer | |
998 | self._renderer_extensions = self.RENDERER_TO_EXTENSION.get( |
|
999 | self._renderer_extensions = self.RENDERER_TO_EXTENSION.get( | |
999 | default_renderer, []) |
|
1000 | default_renderer, []) | |
1000 |
|
1001 | |||
1001 | def search(self, commit, path='/'): |
|
1002 | def search(self, commit, path='/'): | |
1002 | """ |
|
1003 | """ | |
1003 | Find a readme in the given `commit`. |
|
1004 | Find a readme in the given `commit`. | |
1004 | """ |
|
1005 | """ | |
1005 | nodes = commit.get_nodes(path) |
|
1006 | nodes = commit.get_nodes(path) | |
1006 | matches = self._match_readmes(nodes) |
|
1007 | matches = self._match_readmes(nodes) | |
1007 | matches = self._sort_according_to_priority(matches) |
|
1008 | matches = self._sort_according_to_priority(matches) | |
1008 | if matches: |
|
1009 | if matches: | |
1009 | return matches[0].node |
|
1010 | return matches[0].node | |
1010 |
|
1011 | |||
1011 | paths = self._match_paths(nodes) |
|
1012 | paths = self._match_paths(nodes) | |
1012 | paths = self._sort_paths_according_to_priority(paths) |
|
1013 | paths = self._sort_paths_according_to_priority(paths) | |
1013 | for path in paths: |
|
1014 | for path in paths: | |
1014 | match = self.search(commit, path=path) |
|
1015 | match = self.search(commit, path=path) | |
1015 | if match: |
|
1016 | if match: | |
1016 | return match |
|
1017 | return match | |
1017 |
|
1018 | |||
1018 | return None |
|
1019 | return None | |
1019 |
|
1020 | |||
1020 | def _match_readmes(self, nodes): |
|
1021 | def _match_readmes(self, nodes): | |
1021 | for node in nodes: |
|
1022 | for node in nodes: | |
1022 | if not node.is_file(): |
|
1023 | if not node.is_file(): | |
1023 | continue |
|
1024 | continue | |
1024 | path = node.path.rsplit('/', 1)[-1] |
|
1025 | path = node.path.rsplit('/', 1)[-1] | |
1025 | match = self.readme_re.match(path) |
|
1026 | match = self.readme_re.match(path) | |
1026 | if match: |
|
1027 | if match: | |
1027 | extension = match.group(1) |
|
1028 | extension = match.group(1) | |
1028 | yield ReadmeMatch(node, match, self._priority(extension)) |
|
1029 | yield ReadmeMatch(node, match, self._priority(extension)) | |
1029 |
|
1030 | |||
1030 | def _match_paths(self, nodes): |
|
1031 | def _match_paths(self, nodes): | |
1031 | for node in nodes: |
|
1032 | for node in nodes: | |
1032 | if not node.is_dir(): |
|
1033 | if not node.is_dir(): | |
1033 | continue |
|
1034 | continue | |
1034 | match = self.path_re.match(node.path) |
|
1035 | match = self.path_re.match(node.path) | |
1035 | if match: |
|
1036 | if match: | |
1036 | yield node.path |
|
1037 | yield node.path | |
1037 |
|
1038 | |||
1038 | def _priority(self, extension): |
|
1039 | def _priority(self, extension): | |
1039 | renderer_priority = ( |
|
1040 | renderer_priority = ( | |
1040 | 0 if extension in self._renderer_extensions else 1) |
|
1041 | 0 if extension in self._renderer_extensions else 1) | |
1041 | extension_priority = self.default_priorities.get( |
|
1042 | extension_priority = self.default_priorities.get( | |
1042 | extension, self.FALLBACK_PRIORITY) |
|
1043 | extension, self.FALLBACK_PRIORITY) | |
1043 | return (renderer_priority, extension_priority) |
|
1044 | return (renderer_priority, extension_priority) | |
1044 |
|
1045 | |||
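
A quick worked example of the priority tuple (lower sorts first; calls below
are illustrative):

    finder = ReadmeFinder(default_renderer='markdown')
    finder._priority('.md')   # -> (0, 1): preferred renderer, common extension
    finder._priority('.rst')  # -> (1, 1): known extension, non-preferred renderer
    finder._priority('.xyz')  # -> (1, 99): unknown extension, FALLBACK_PRIORITY
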
1045 | def _sort_according_to_priority(self, matches): |
|
1046 | def _sort_according_to_priority(self, matches): | |
1046 |
|
1047 | |||
1047 | def priority_and_path(match): |
|
1048 | def priority_and_path(match): | |
1048 | return (match.priority, match.path) |
|
1049 | return (match.priority, match.path) | |
1049 |
|
1050 | |||
1050 | return sorted(matches, key=priority_and_path) |
|
1051 | return sorted(matches, key=priority_and_path) | |
1051 |
|
1052 | |||
1052 | def _sort_paths_according_to_priority(self, paths): |
|
1053 | def _sort_paths_according_to_priority(self, paths): | |
1053 |
|
1054 | |||
1054 | def priority_and_path(path): |
|
1055 | def priority_and_path(path): | |
1055 | return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path) |
|
1056 | return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path) | |
1056 |
|
1057 | |||
1057 | return sorted(paths, key=priority_and_path) |
|
1058 | return sorted(paths, key=priority_and_path) | |
1058 |
|
1059 | |||
1059 |
|
1060 | |||
1060 | class ReadmeMatch: |
|
1061 | class ReadmeMatch: | |
1061 |
|
1062 | |||
1062 | def __init__(self, node, match, priority): |
|
1063 | def __init__(self, node, match, priority): | |
1063 | self.node = node |
|
1064 | self.node = node | |
1064 | self._match = match |
|
1065 | self._match = match | |
1065 | self.priority = priority |
|
1066 | self.priority = priority | |
1066 |
|
1067 | |||
1067 | @property |
|
1068 | @property | |
1068 | def path(self): |
|
1069 | def path(self): | |
1069 | return self.node.path |
|
1070 | return self.node.path | |
1070 |
|
1071 | |||
1071 | def __repr__(self): |
|
1072 | def __repr__(self): | |
1072 | return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority) |
|
1073 | return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority) | |
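
A hedged usage sketch of the finder (assumes `commit` is a vcs commit object
exposing get_nodes(), as relied on by search()):

    finder = ReadmeFinder(default_renderer='markdown')
    readme = finder.search(commit)  # e.g. the node for README.md, or None
    if readme is not None:
        print(readme.path)
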
@@ -1,941 +1,942 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Scm model for RhodeCode |
|
22 | Scm model for RhodeCode | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import os.path |
|
25 | import os.path | |
26 | import traceback |
|
26 | import traceback | |
27 | import logging |
|
27 | import logging | |
28 | import cStringIO |
|
28 | import cStringIO | |
29 |
|
29 | |||
30 | from sqlalchemy import func |
|
30 | from sqlalchemy import func | |
31 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
31 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
32 |
|
32 | |||
33 | import rhodecode |
|
33 | import rhodecode | |
34 | from rhodecode.lib.vcs import get_backend |
|
34 | from rhodecode.lib.vcs import get_backend | |
35 | from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError |
|
35 | from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError | |
36 | from rhodecode.lib.vcs.nodes import FileNode |
|
36 | from rhodecode.lib.vcs.nodes import FileNode | |
37 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
37 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |
38 | from rhodecode.lib import helpers as h, rc_cache |
|
38 | from rhodecode.lib import helpers as h, rc_cache | |
39 | from rhodecode.lib.auth import ( |
|
39 | from rhodecode.lib.auth import ( | |
40 | HasRepoPermissionAny, HasRepoGroupPermissionAny, |
|
40 | HasRepoPermissionAny, HasRepoGroupPermissionAny, | |
41 | HasUserGroupPermissionAny) |
|
41 | HasUserGroupPermissionAny) | |
42 | from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError |
|
42 | from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError | |
43 | from rhodecode.lib import hooks_utils |
|
43 | from rhodecode.lib import hooks_utils | |
44 | from rhodecode.lib.utils import ( |
|
44 | from rhodecode.lib.utils import ( | |
45 | get_filesystem_repos, make_db_config) |
|
45 | get_filesystem_repos, make_db_config) | |
46 | from rhodecode.lib.utils2 import (safe_str, safe_unicode) |
|
46 | from rhodecode.lib.utils2 import (safe_str, safe_unicode) | |
47 | from rhodecode.lib.system_info import get_system_info |
|
47 | from rhodecode.lib.system_info import get_system_info | |
48 | from rhodecode.model import BaseModel |
|
48 | from rhodecode.model import BaseModel | |
49 | from rhodecode.model.db import ( |
|
49 | from rhodecode.model.db import ( | |
50 | Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup, |
|
50 | Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup, | |
51 | PullRequest) |
|
51 | PullRequest) | |
52 | from rhodecode.model.settings import VcsSettingsModel |
|
52 | from rhodecode.model.settings import VcsSettingsModel | |
53 | from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl |
|
53 | from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl | |
54 |
|
54 | |||
55 | log = logging.getLogger(__name__) |
|
55 | log = logging.getLogger(__name__) | |
56 |
|
56 | |||
57 |
|
57 | |||
58 | class UserTemp(object): |
|
58 | class UserTemp(object): | |
59 | def __init__(self, user_id): |
|
59 | def __init__(self, user_id): | |
60 | self.user_id = user_id |
|
60 | self.user_id = user_id | |
61 |
|
61 | |||
62 | def __repr__(self): |
|
62 | def __repr__(self): | |
63 | return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id) |
|
63 | return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id) | |
64 |
|
64 | |||
65 |
|
65 | |||
66 | class RepoTemp(object): |
|
66 | class RepoTemp(object): | |
67 | def __init__(self, repo_id): |
|
67 | def __init__(self, repo_id): | |
68 | self.repo_id = repo_id |
|
68 | self.repo_id = repo_id | |
69 |
|
69 | |||
70 | def __repr__(self): |
|
70 | def __repr__(self): | |
71 | return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id) |
|
71 | return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id) | |
72 |
|
72 | |||
73 |
|
73 | |||
74 | class SimpleCachedRepoList(object): |
|
74 | class SimpleCachedRepoList(object): | |
75 | """ |
|
75 | """ | |
76 | Lighter version of iteration of repos without the scm initialisation, |
|
76 | Lighter version of iteration of repos without the scm initialisation, | |
77 | and with cache usage |
|
77 | and with cache usage | |
78 | """ |
|
78 | """ | |
79 | def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None): |
|
79 | def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None): | |
80 | self.db_repo_list = db_repo_list |
|
80 | self.db_repo_list = db_repo_list | |
81 | self.repos_path = repos_path |
|
81 | self.repos_path = repos_path | |
82 | self.order_by = order_by |
|
82 | self.order_by = order_by | |
83 | self.reversed = (order_by or '').startswith('-') |
|
83 | self.reversed = (order_by or '').startswith('-') | |
84 | if not perm_set: |
|
84 | if not perm_set: | |
85 | perm_set = ['repository.read', 'repository.write', |
|
85 | perm_set = ['repository.read', 'repository.write', | |
86 | 'repository.admin'] |
|
86 | 'repository.admin'] | |
87 | self.perm_set = perm_set |
|
87 | self.perm_set = perm_set | |
88 |
|
88 | |||
89 | def __len__(self): |
|
89 | def __len__(self): | |
90 | return len(self.db_repo_list) |
|
90 | return len(self.db_repo_list) | |
91 |
|
91 | |||
92 | def __repr__(self): |
|
92 | def __repr__(self): | |
93 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
93 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) | |
94 |
|
94 | |||
95 | def __iter__(self): |
|
95 | def __iter__(self): | |
96 | for dbr in self.db_repo_list: |
|
96 | for dbr in self.db_repo_list: | |
97 | # check permission at this level |
|
97 | # check permission at this level | |
98 | has_perm = HasRepoPermissionAny(*self.perm_set)( |
|
98 | has_perm = HasRepoPermissionAny(*self.perm_set)( | |
99 | dbr.repo_name, 'SimpleCachedRepoList check') |
|
99 | dbr.repo_name, 'SimpleCachedRepoList check') | |
100 | if not has_perm: |
|
100 | if not has_perm: | |
101 | continue |
|
101 | continue | |
102 |
|
102 | |||
103 | tmp_d = { |
|
103 | tmp_d = { | |
104 | 'name': dbr.repo_name, |
|
104 | 'name': dbr.repo_name, | |
105 | 'dbrepo': dbr.get_dict(), |
|
105 | 'dbrepo': dbr.get_dict(), | |
106 | 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {} |
|
106 | 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {} | |
107 | } |
|
107 | } | |
108 | yield tmp_d |
|
108 | yield tmp_d | |
109 |
|
109 | |||
110 |
|
110 | |||
111 | class _PermCheckIterator(object): |
|
111 | class _PermCheckIterator(object): | |
112 |
|
112 | |||
113 | def __init__( |
|
113 | def __init__( | |
114 | self, obj_list, obj_attr, perm_set, perm_checker, |
|
114 | self, obj_list, obj_attr, perm_set, perm_checker, | |
115 | extra_kwargs=None): |
|
115 | extra_kwargs=None): | |
116 | """ |
|
116 | """ | |
117 | Creates iterator from given list of objects, additionally |
|
117 | Creates iterator from given list of objects, additionally | |
118 | checking permission for them from perm_set var |
|
118 | checking permission for them from perm_set var | |
119 |
|
119 | |||
120 | :param obj_list: list of db objects |
|
120 | :param obj_list: list of db objects | |
121 | :param obj_attr: attribute of object to pass into perm_checker |
|
121 | :param obj_attr: attribute of object to pass into perm_checker | |
122 | :param perm_set: list of permissions to check |
|
122 | :param perm_set: list of permissions to check | |
123 | :param perm_checker: callable to check permissions against |
|
123 | :param perm_checker: callable to check permissions against | |
124 | """ |
|
124 | """ | |
125 | self.obj_list = obj_list |
|
125 | self.obj_list = obj_list | |
126 | self.obj_attr = obj_attr |
|
126 | self.obj_attr = obj_attr | |
127 | self.perm_set = perm_set |
|
127 | self.perm_set = perm_set | |
128 | self.perm_checker = perm_checker |
|
128 | self.perm_checker = perm_checker | |
129 | self.extra_kwargs = extra_kwargs or {} |
|
129 | self.extra_kwargs = extra_kwargs or {} | |
130 |
|
130 | |||
131 | def __len__(self): |
|
131 | def __len__(self): | |
132 | return len(self.obj_list) |
|
132 | return len(self.obj_list) | |
133 |
|
133 | |||
134 | def __repr__(self): |
|
134 | def __repr__(self): | |
135 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
135 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) | |
136 |
|
136 | |||
137 | def __iter__(self): |
|
137 | def __iter__(self): | |
138 | checker = self.perm_checker(*self.perm_set) |
|
138 | checker = self.perm_checker(*self.perm_set) | |
139 | for db_obj in self.obj_list: |
|
139 | for db_obj in self.obj_list: | |
140 | # check permission at this level |
|
140 | # check permission at this level | |
141 | name = getattr(db_obj, self.obj_attr, None) |
|
141 | name = getattr(db_obj, self.obj_attr, None) | |
142 | if not checker(name, self.__class__.__name__, **self.extra_kwargs): |
|
142 | if not checker(name, self.__class__.__name__, **self.extra_kwargs): | |
143 | continue |
|
143 | continue | |
144 |
|
144 | |||
145 | yield db_obj |
|
145 | yield db_obj | |
146 |
|
146 | |||
147 |
|
147 | |||
148 | class RepoList(_PermCheckIterator): |
|
148 | class RepoList(_PermCheckIterator): | |
149 |
|
149 | |||
150 | def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None): |
|
150 | def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None): | |
151 | if not perm_set: |
|
151 | if not perm_set: | |
152 | perm_set = [ |
|
152 | perm_set = [ | |
153 | 'repository.read', 'repository.write', 'repository.admin'] |
|
153 | 'repository.read', 'repository.write', 'repository.admin'] | |
154 |
|
154 | |||
155 | super(RepoList, self).__init__( |
|
155 | super(RepoList, self).__init__( | |
156 | obj_list=db_repo_list, |
|
156 | obj_list=db_repo_list, | |
157 | obj_attr='repo_name', perm_set=perm_set, |
|
157 | obj_attr='repo_name', perm_set=perm_set, | |
158 | perm_checker=HasRepoPermissionAny, |
|
158 | perm_checker=HasRepoPermissionAny, | |
159 | extra_kwargs=extra_kwargs) |
|
159 | extra_kwargs=extra_kwargs) | |
160 |
|
160 | |||
161 |
|
161 | |||
162 | class RepoGroupList(_PermCheckIterator): |
|
162 | class RepoGroupList(_PermCheckIterator): | |
163 |
|
163 | |||
164 | def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None): |
|
164 | def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None): | |
165 | if not perm_set: |
|
165 | if not perm_set: | |
166 | perm_set = ['group.read', 'group.write', 'group.admin'] |
|
166 | perm_set = ['group.read', 'group.write', 'group.admin'] | |
167 |
|
167 | |||
168 | super(RepoGroupList, self).__init__( |
|
168 | super(RepoGroupList, self).__init__( | |
169 | obj_list=db_repo_group_list, |
|
169 | obj_list=db_repo_group_list, | |
170 | obj_attr='group_name', perm_set=perm_set, |
|
170 | obj_attr='group_name', perm_set=perm_set, | |
171 | perm_checker=HasRepoGroupPermissionAny, |
|
171 | perm_checker=HasRepoGroupPermissionAny, | |
172 | extra_kwargs=extra_kwargs) |
|
172 | extra_kwargs=extra_kwargs) | |
173 |
|
173 | |||
174 |
|
174 | |||
175 | class UserGroupList(_PermCheckIterator): |
|
175 | class UserGroupList(_PermCheckIterator): | |
176 |
|
176 | |||
177 | def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None): |
|
177 | def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None): | |
178 | if not perm_set: |
|
178 | if not perm_set: | |
179 | perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin'] |
|
179 | perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin'] | |
180 |
|
180 | |||
181 | super(UserGroupList, self).__init__( |
|
181 | super(UserGroupList, self).__init__( | |
182 | obj_list=db_user_group_list, |
|
182 | obj_list=db_user_group_list, | |
183 | obj_attr='users_group_name', perm_set=perm_set, |
|
183 | obj_attr='users_group_name', perm_set=perm_set, | |
184 | perm_checker=HasUserGroupPermissionAny, |
|
184 | perm_checker=HasUserGroupPermissionAny, | |
185 | extra_kwargs=extra_kwargs) |
|
185 | extra_kwargs=extra_kwargs) | |
186 |
|
186 | |||
187 |
|
187 | |||
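
A hedged sketch of these permission-checking iterators (repo names and perms
are illustrative):

    # yields only repos the current request user can read/write/admin
    readable = RepoList(Repository.query().all())
    # narrow the check to admin-level access only
    admin_only = RepoList(Repository.query().all(), perm_set=['repository.admin'])
    for db_repo in admin_only:
        log.debug('admin access to %s', db_repo.repo_name)
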
188 | class ScmModel(BaseModel): |
|
188 | class ScmModel(BaseModel): | |
189 | """ |
|
189 | """ | |
190 | Generic Scm Model |
|
190 | Generic Scm Model | |
191 | """ |
|
191 | """ | |
192 |
|
192 | |||
193 | @LazyProperty |
|
193 | @LazyProperty | |
194 | def repos_path(self): |
|
194 | def repos_path(self): | |
195 | """ |
|
195 | """ | |
196 | Gets the repositories root path from database |
|
196 | Gets the repositories root path from database | |
197 | """ |
|
197 | """ | |
198 |
|
198 | |||
199 | settings_model = VcsSettingsModel(sa=self.sa) |
|
199 | settings_model = VcsSettingsModel(sa=self.sa) | |
200 | return settings_model.get_repos_location() |
|
200 | return settings_model.get_repos_location() | |
201 |
|
201 | |||
202 | def repo_scan(self, repos_path=None): |
|
202 | def repo_scan(self, repos_path=None): | |
203 | """ |
|
203 | """ | |
204 | Listing of repositories in given path. This path should not be a |
|
204 | Listing of repositories in given path. This path should not be a | |
205 | repository itself. Return a dictionary of repository objects |
|
205 | repository itself. Return a dictionary of repository objects | |
206 |
|
206 | |||
207 | :param repos_path: path to directory containing repositories |
|
207 | :param repos_path: path to directory containing repositories | |
208 | """ |
|
208 | """ | |
209 |
|
209 | |||
210 | if repos_path is None: |
|
210 | if repos_path is None: | |
211 | repos_path = self.repos_path |
|
211 | repos_path = self.repos_path | |
212 |
|
212 | |||
213 | log.info('scanning for repositories in %s', repos_path) |
|
213 | log.info('scanning for repositories in %s', repos_path) | |
214 |
|
214 | |||
215 | config = make_db_config() |
|
215 | config = make_db_config() | |
216 | config.set('extensions', 'largefiles', '') |
|
216 | config.set('extensions', 'largefiles', '') | |
217 | repos = {} |
|
217 | repos = {} | |
218 |
|
218 | |||
219 | for name, path in get_filesystem_repos(repos_path, recursive=True): |
|
219 | for name, path in get_filesystem_repos(repos_path, recursive=True): | |
220 | # name needs to be decomposed and put back together using the / |
|
220 | # name needs to be decomposed and put back together using the / | |
221 | # since this is the internal storage separator for rhodecode |
|
221 | # since this is the internal storage separator for rhodecode | |
222 | name = Repository.normalize_repo_name(name) |
|
222 | name = Repository.normalize_repo_name(name) | |
223 |
|
223 | |||
224 | try: |
|
224 | try: | |
225 | if name in repos: |
|
225 | if name in repos: | |
226 | raise RepositoryError('Duplicate repository name %s ' |
|
226 | raise RepositoryError('Duplicate repository name %s ' | |
227 | 'found in %s' % (name, path)) |
|
227 | 'found in %s' % (name, path)) | |
228 | elif path[0] in rhodecode.BACKENDS: |
|
228 | elif path[0] in rhodecode.BACKENDS: | |
229 | backend = get_backend(path[0]) |
|
229 | backend = get_backend(path[0]) | |
230 | repos[name] = backend(path[1], config=config) |
|
230 | repos[name] = backend(path[1], config=config, |
|
231 | with_wire={"cache": False}) | |||
231 | except OSError: |
|
232 | except OSError: | |
232 | continue |
|
233 | continue | |
233 | log.debug('found %s paths with repositories', len(repos)) |
|
234 | log.debug('found %s paths with repositories', len(repos)) | |
234 | return repos |
|
235 | return repos | |
235 |
|
236 | |||
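
A minimal sketch of a scan run (not from the source; paths come from the
configured storage location):

    scm = ScmModel()
    found = scm.repo_scan()  # defaults to self.repos_path
    for name, vcs_repo in found.items():
        log.debug('found %s (%s)', name, vcs_repo.alias)
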
236 | def get_repos(self, all_repos=None, sort_key=None): |
|
237 | def get_repos(self, all_repos=None, sort_key=None): | |
237 | """ |
|
238 | """ | |
238 | Get all repositories from db and for each repo create its |
|
239 | Get all repositories from db and for each repo create its | |
239 | backend instance and fill that backend with information from database |
|
240 | backend instance and fill that backend with information from database | |
240 |
|
241 | |||
241 | :param all_repos: list of repository names as strings |
|
242 | :param all_repos: list of repository names as strings | |
242 | give specific repositories list, good for filtering |
|
243 | give specific repositories list, good for filtering | |
243 |
|
244 | |||
244 | :param sort_key: initial sorting of repositories |
|
245 | :param sort_key: initial sorting of repositories | |
245 | """ |
|
246 | """ | |
246 | if all_repos is None: |
|
247 | if all_repos is None: | |
247 | all_repos = self.sa.query(Repository)\ |
|
248 | all_repos = self.sa.query(Repository)\ | |
248 | .filter(Repository.group_id == None)\ |
|
249 | .filter(Repository.group_id == None)\ | |
249 | .order_by(func.lower(Repository.repo_name)).all() |
|
250 | .order_by(func.lower(Repository.repo_name)).all() | |
250 | repo_iter = SimpleCachedRepoList( |
|
251 | repo_iter = SimpleCachedRepoList( | |
251 | all_repos, repos_path=self.repos_path, order_by=sort_key) |
|
252 | all_repos, repos_path=self.repos_path, order_by=sort_key) | |
252 | return repo_iter |
|
253 | return repo_iter | |
253 |
|
254 | |||
254 | def get_repo_groups(self, all_groups=None): |
|
255 | def get_repo_groups(self, all_groups=None): | |
255 | if all_groups is None: |
|
256 | if all_groups is None: | |
256 | all_groups = RepoGroup.query()\ |
|
257 | all_groups = RepoGroup.query()\ | |
257 | .filter(RepoGroup.group_parent_id == None).all() |
|
258 | .filter(RepoGroup.group_parent_id == None).all() | |
258 | return [x for x in RepoGroupList(all_groups)] |
|
259 | return [x for x in RepoGroupList(all_groups)] | |
259 |
|
260 | |||
260 | def mark_for_invalidation(self, repo_name, delete=False): |
|
261 | def mark_for_invalidation(self, repo_name, delete=False): | |
261 | """ |
|
262 | """ | |
262 | Mark caches of this repo invalid in the database. `delete` flag |
|
263 | Mark caches of this repo invalid in the database. `delete` flag | |
263 | removes the cache entries |
|
264 | removes the cache entries | |
264 |
|
265 | |||
265 | :param repo_name: the repo_name for which caches should be marked |
|
266 | :param repo_name: the repo_name for which caches should be marked | |
266 | invalid, or deleted |
|
267 | invalid, or deleted | |
267 | :param delete: delete the entry keys instead of setting bool |
|
268 | :param delete: delete the entry keys instead of setting bool | |
268 | flag on them, and also purge caches used by the dogpile |
|
269 | flag on them, and also purge caches used by the dogpile | |
269 | """ |
|
270 | """ | |
270 | repo = Repository.get_by_repo_name(repo_name) |
|
271 | repo = Repository.get_by_repo_name(repo_name) | |
271 |
|
272 | |||
272 | if repo: |
|
273 | if repo: | |
273 | invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( |
|
274 | invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( | |
274 | repo_id=repo.repo_id) |
|
275 | repo_id=repo.repo_id) | |
275 | CacheKey.set_invalidate(invalidation_namespace, delete=delete) |
|
276 | CacheKey.set_invalidate(invalidation_namespace, delete=delete) | |
276 |
|
277 | |||
277 | repo_id = repo.repo_id |
|
278 | repo_id = repo.repo_id | |
278 | config = repo._config |
|
279 | config = repo._config | |
279 | config.set('extensions', 'largefiles', '') |
|
280 | config.set('extensions', 'largefiles', '') | |
280 | repo.update_commit_cache(config=config, cs_cache=None) |
|
281 | repo.update_commit_cache(config=config, cs_cache=None) | |
281 | if delete: |
|
282 | if delete: | |
282 | cache_namespace_uid = 'cache_repo.{}'.format(repo_id) |
|
283 | cache_namespace_uid = 'cache_repo.{}'.format(repo_id) | |
283 | rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid) |
|
284 | rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid) | |
284 |
|
285 | |||
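
A hedged usage sketch of cache invalidation (repo name illustrative):

    ScmModel().mark_for_invalidation('some-group/some-repo')
    # delete=True additionally purges the dogpile 'cache_repo' namespace
    ScmModel().mark_for_invalidation('some-group/some-repo', delete=True)
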
285 | def toggle_following_repo(self, follow_repo_id, user_id): |
|
286 | def toggle_following_repo(self, follow_repo_id, user_id): | |
286 |
|
287 | |||
287 | f = self.sa.query(UserFollowing)\ |
|
288 | f = self.sa.query(UserFollowing)\ | |
288 | .filter(UserFollowing.follows_repo_id == follow_repo_id)\ |
|
289 | .filter(UserFollowing.follows_repo_id == follow_repo_id)\ | |
289 | .filter(UserFollowing.user_id == user_id).scalar() |
|
290 | .filter(UserFollowing.user_id == user_id).scalar() | |
290 |
|
291 | |||
291 | if f is not None: |
|
292 | if f is not None: | |
292 | try: |
|
293 | try: | |
293 | self.sa.delete(f) |
|
294 | self.sa.delete(f) | |
294 | return |
|
295 | return | |
295 | except Exception: |
|
296 | except Exception: | |
296 | log.error(traceback.format_exc()) |
|
297 | log.error(traceback.format_exc()) | |
297 | raise |
|
298 | raise | |
298 |
|
299 | |||
299 | try: |
|
300 | try: | |
300 | f = UserFollowing() |
|
301 | f = UserFollowing() | |
301 | f.user_id = user_id |
|
302 | f.user_id = user_id | |
302 | f.follows_repo_id = follow_repo_id |
|
303 | f.follows_repo_id = follow_repo_id | |
303 | self.sa.add(f) |
|
304 | self.sa.add(f) | |
304 | except Exception: |
|
305 | except Exception: | |
305 | log.error(traceback.format_exc()) |
|
306 | log.error(traceback.format_exc()) | |
306 | raise |
|
307 | raise | |
307 |
|
308 | |||
308 | def toggle_following_user(self, follow_user_id, user_id): |
|
309 | def toggle_following_user(self, follow_user_id, user_id): | |
309 | f = self.sa.query(UserFollowing)\ |
|
310 | f = self.sa.query(UserFollowing)\ | |
310 | .filter(UserFollowing.follows_user_id == follow_user_id)\ |
|
311 | .filter(UserFollowing.follows_user_id == follow_user_id)\ | |
311 | .filter(UserFollowing.user_id == user_id).scalar() |
|
312 | .filter(UserFollowing.user_id == user_id).scalar() | |
312 |
|
313 | |||
313 | if f is not None: |
|
314 | if f is not None: | |
314 | try: |
|
315 | try: | |
315 | self.sa.delete(f) |
|
316 | self.sa.delete(f) | |
316 | return |
|
317 | return | |
317 | except Exception: |
|
318 | except Exception: | |
318 | log.error(traceback.format_exc()) |
|
319 | log.error(traceback.format_exc()) | |
319 | raise |
|
320 | raise | |
320 |
|
321 | |||
321 | try: |
|
322 | try: | |
322 | f = UserFollowing() |
|
323 | f = UserFollowing() | |
323 | f.user_id = user_id |
|
324 | f.user_id = user_id | |
324 | f.follows_user_id = follow_user_id |
|
325 | f.follows_user_id = follow_user_id | |
325 | self.sa.add(f) |
|
326 | self.sa.add(f) | |
326 | except Exception: |
|
327 | except Exception: | |
327 | log.error(traceback.format_exc()) |
|
328 | log.error(traceback.format_exc()) | |
328 | raise |
|
329 | raise | |
329 |
|
330 | |||
330 | def is_following_repo(self, repo_name, user_id, cache=False): |
|
331 | def is_following_repo(self, repo_name, user_id, cache=False): | |
331 | r = self.sa.query(Repository)\ |
|
332 | r = self.sa.query(Repository)\ | |
332 | .filter(Repository.repo_name == repo_name).scalar() |
|
333 | .filter(Repository.repo_name == repo_name).scalar() | |
333 |
|
334 | |||
334 | f = self.sa.query(UserFollowing)\ |
|
335 | f = self.sa.query(UserFollowing)\ | |
335 | .filter(UserFollowing.follows_repository == r)\ |
|
336 | .filter(UserFollowing.follows_repository == r)\ | |
336 | .filter(UserFollowing.user_id == user_id).scalar() |
|
337 | .filter(UserFollowing.user_id == user_id).scalar() | |
337 |
|
338 | |||
338 | return f is not None |
|
339 | return f is not None | |
339 |
|
340 | |||
340 | def is_following_user(self, username, user_id, cache=False): |
|
341 | def is_following_user(self, username, user_id, cache=False): | |
341 | u = User.get_by_username(username) |
|
342 | u = User.get_by_username(username) | |
342 |
|
343 | |||
343 | f = self.sa.query(UserFollowing)\ |
|
344 | f = self.sa.query(UserFollowing)\ | |
344 | .filter(UserFollowing.follows_user == u)\ |
|
345 | .filter(UserFollowing.follows_user == u)\ | |
345 | .filter(UserFollowing.user_id == user_id).scalar() |
|
346 | .filter(UserFollowing.user_id == user_id).scalar() | |
346 |
|
347 | |||
347 | return f is not None |
|
348 | return f is not None | |
348 |
|
349 | |||
349 | def get_followers(self, repo): |
|
350 | def get_followers(self, repo): | |
350 | repo = self._get_repo(repo) |
|
351 | repo = self._get_repo(repo) | |
351 |
|
352 | |||
352 | return self.sa.query(UserFollowing)\ |
|
353 | return self.sa.query(UserFollowing)\ | |
353 | .filter(UserFollowing.follows_repository == repo).count() |
|
354 | .filter(UserFollowing.follows_repository == repo).count() | |
354 |
|
355 | |||
355 | def get_forks(self, repo): |
|
356 | def get_forks(self, repo): | |
356 | repo = self._get_repo(repo) |
|
357 | repo = self._get_repo(repo) | |
357 | return self.sa.query(Repository)\ |
|
358 | return self.sa.query(Repository)\ | |
358 | .filter(Repository.fork == repo).count() |
|
359 | .filter(Repository.fork == repo).count() | |
359 |
|
360 | |||
360 | def get_pull_requests(self, repo): |
|
361 | def get_pull_requests(self, repo): | |
361 | repo = self._get_repo(repo) |
|
362 | repo = self._get_repo(repo) | |
362 | return self.sa.query(PullRequest)\ |
|
363 | return self.sa.query(PullRequest)\ | |
363 | .filter(PullRequest.target_repo == repo)\ |
|
364 | .filter(PullRequest.target_repo == repo)\ | |
364 | .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() |
|
365 | .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() | |
365 |
|
366 | |||
366 | def mark_as_fork(self, repo, fork, user): |
|
367 | def mark_as_fork(self, repo, fork, user): | |
367 | repo = self._get_repo(repo) |
|
368 | repo = self._get_repo(repo) | |
368 | fork = self._get_repo(fork) |
|
369 | fork = self._get_repo(fork) | |
369 | if fork and repo.repo_id == fork.repo_id: |
|
370 | if fork and repo.repo_id == fork.repo_id: | |
370 | raise Exception("Cannot set repository as fork of itself") |
|
371 | raise Exception("Cannot set repository as fork of itself") | |
371 |
|
372 | |||
372 | if fork and repo.repo_type != fork.repo_type: |
|
373 | if fork and repo.repo_type != fork.repo_type: | |
373 | raise RepositoryError( |
|
374 | raise RepositoryError( | |
374 | "Cannot set repository as fork of repository with other type") |
|
375 | "Cannot set repository as fork of repository with other type") | |
375 |
|
376 | |||
376 | repo.fork = fork |
|
377 | repo.fork = fork | |
377 | self.sa.add(repo) |
|
378 | self.sa.add(repo) | |
378 | return repo |
|
379 | return repo | |
379 |
|
380 | |||
380 | def pull_changes(self, repo, username, remote_uri=None, validate_uri=True): |
|
381 | def pull_changes(self, repo, username, remote_uri=None, validate_uri=True): | |
381 | dbrepo = self._get_repo(repo) |
|
382 | dbrepo = self._get_repo(repo) | |
382 | remote_uri = remote_uri or dbrepo.clone_uri |
|
383 | remote_uri = remote_uri or dbrepo.clone_uri | |
383 | if not remote_uri: |
|
384 | if not remote_uri: | |
384 | raise Exception("This repository doesn't have a clone uri") |
|
385 | raise Exception("This repository doesn't have a clone uri") | |
385 |
|
386 | |||
386 | repo = dbrepo.scm_instance(cache=False) |
|
387 | repo = dbrepo.scm_instance(cache=False) | |
387 | repo.config.clear_section('hooks') |
|
388 | repo.config.clear_section('hooks') | |
388 |
|
389 | |||
389 | try: |
|
390 | try: | |
390 | # NOTE(marcink): add extra validation so we skip invalid urls |
|
391 | # NOTE(marcink): add extra validation so we skip invalid urls | |
391 | # this is because these tasks can be executed via scheduler without |
|
392 | # this is because these tasks can be executed via scheduler without | |
392 | # proper validation of remote_uri |
|
393 | # proper validation of remote_uri | |
393 | if validate_uri: |
|
394 | if validate_uri: | |
394 | config = make_db_config(clear_session=False) |
|
395 | config = make_db_config(clear_session=False) | |
395 | url_validator(remote_uri, dbrepo.repo_type, config) |
|
396 | url_validator(remote_uri, dbrepo.repo_type, config) | |
396 | except InvalidCloneUrl: |
|
397 | except InvalidCloneUrl: | |
397 | raise |
|
398 | raise | |
398 |
|
399 | |||
399 | repo_name = dbrepo.repo_name |
|
400 | repo_name = dbrepo.repo_name | |
400 | try: |
|
401 | try: | |
401 | # TODO: we need to make sure those operations call proper hooks ! |
|
402 | # TODO: we need to make sure those operations call proper hooks ! | |
402 | repo.fetch(remote_uri) |
|
403 | repo.fetch(remote_uri) | |
403 |
|
404 | |||
404 | self.mark_for_invalidation(repo_name) |
|
405 | self.mark_for_invalidation(repo_name) | |
405 | except Exception: |
|
406 | except Exception: | |
406 | log.error(traceback.format_exc()) |
|
407 | log.error(traceback.format_exc()) | |
407 | raise |
|
408 | raise | |
408 |
|
409 | |||
409 | def push_changes(self, repo, username, remote_uri=None, validate_uri=True): |
|
410 | def push_changes(self, repo, username, remote_uri=None, validate_uri=True): | |
410 | dbrepo = self._get_repo(repo) |
|
411 | dbrepo = self._get_repo(repo) | |
411 | remote_uri = remote_uri or dbrepo.push_uri |
|
412 | remote_uri = remote_uri or dbrepo.push_uri | |
412 | if not remote_uri: |
|
413 | if not remote_uri: | |
413 | raise Exception("This repository doesn't have a clone uri") |
|
414 | raise Exception("This repository doesn't have a clone uri") | |
414 |
|
415 | |||
415 | repo = dbrepo.scm_instance(cache=False) |
|
416 | repo = dbrepo.scm_instance(cache=False) | |
416 | repo.config.clear_section('hooks') |
|
417 | repo.config.clear_section('hooks') | |
417 |
|
418 | |||
418 | try: |
|
419 | try: | |
419 | # NOTE(marcink): add extra validation so we skip invalid urls |
|
420 | # NOTE(marcink): add extra validation so we skip invalid urls | |
420 | # this is because these tasks can be executed via scheduler without |
|
421 | # this is because these tasks can be executed via scheduler without | |
421 | # proper validation of remote_uri |
|
422 | # proper validation of remote_uri | |
422 | if validate_uri: |
|
423 | if validate_uri: | |
423 | config = make_db_config(clear_session=False) |
|
424 | config = make_db_config(clear_session=False) | |
424 | url_validator(remote_uri, dbrepo.repo_type, config) |
|
425 | url_validator(remote_uri, dbrepo.repo_type, config) | |
425 | except InvalidCloneUrl: |
|
426 | except InvalidCloneUrl: | |
426 | raise |
|
427 | raise | |
427 |
|
428 | |||
428 | try: |
|
429 | try: | |
429 | repo.push(remote_uri) |
|
430 | repo.push(remote_uri) | |
430 | except Exception: |
|
431 | except Exception: | |
431 | log.error(traceback.format_exc()) |
|
432 | log.error(traceback.format_exc()) | |
432 | raise |
|
433 | raise | |
433 |
|
434 | |||
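
A hedged sketch of syncing with a remote (repo name and URI are illustrative):

    scm = ScmModel()
    scm.pull_changes('mirror/upstream', username='admin')  # uses dbrepo.clone_uri
    scm.push_changes('mirror/upstream', username='admin',
                     remote_uri='https://code.example.com/upstream.git')
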
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits a change to a single file.

        :param repo: SCM instance

        """
        user = self._get_user(user)

        # decoding here ensures that we have properly encoded values,
        # in any other case this will throw exceptions and deny the commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author need to be unicode, the proper backend should
        # then translate that into the required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if the commit fails we want a fresh object too
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip

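    # Illustrative usage sketch (not part of the original changeset):
    # committing a single-file change through the in-memory commit API. All
    # names below (`model`, `db_repo`) and the user id are assumptions.
    #
    #   scm_repo = db_repo.scm_instance(cache=False)
    #   head = scm_repo.get_commit()
    #   model.commit_change(
    #       repo=scm_repo, repo_name=db_repo.repo_name, commit=head,
    #       user=2, author=u'Jane Doe <jane@example.com>',
    #       message=u'Fix typo in README', content='new text\n',
    #       f_path='README.rst')
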
    def _sanitize_path(self, f_path):
        if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
            raise NonRelativePathError('%s is not a relative path' % f_path)
        if f_path:
            f_path = os.path.normpath(f_path)
        return f_path

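    # Behaviour sketch (illustrative, not part of the original changeset):
    # `_sanitize_path` rejects absolute and parent-relative paths and
    # normalizes everything else.
    #
    #   model._sanitize_path('docs/index.rst')   # -> 'docs/index.rst'
    #   model._sanitize_path('docs//index.rst')  # -> 'docs/index.rst'
    #   model._sanitize_path('../etc/passwd')    # raises NonRelativePathError
    #   model._sanitize_path('/etc/passwd')      # raises NonRelativePathError
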
    def get_dirnode_metadata(self, request, commit, dir_node):
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # we skip non-file nodes
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data

    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        Recursively walks the root dir and returns all paths found there,
        based on the repository walk function.

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with description
        :param extended_info: show additional info such as md5, binary, size etc
        :param content: add nodes content to the return data
        :param max_file_bytes: will not return file contents over this limit

        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):

                for f in files:
                    _content = None
                    _data = f_name = f.unicode_path

                    if not flat:
                        _data = {
                            "name": h.escape(f_name),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            over_size_limit = (max_file_bytes is not None
                                               and f.size > max_file_bytes)
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)

                for d in dirs:
                    _data = d_name = d.unicode_path
                    if not flat:
                        _data = {
                            "name": h.escape(d_name),
                            "type": "dir",
                        }
                        if extended_info:
                            _data.update({
                                "md5": None,
                                "binary": None,
                                "size": None,
                                "extension": None,
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_nodes")
            raise

        return _dirs, _files

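    # Illustrative usage sketch (not part of the original changeset): listing
    # the tree of a commit as dicts, skipping contents of files over 1 MB.
    # The repository name and commit id are placeholders.
    #
    #   dirs, files = model.get_nodes(
    #       'some-repo', commit_id='tip', root_path='/', flat=False,
    #       extended_info=True, content=True, max_file_bytes=1024 * 1024)
    #   # files -> [{'name': ..., 'type': 'file', 'md5': ..., 'size': ...,
    #   #            'content': <text> or None}, ...]
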
    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        Retrieves a single node from the given commit.
        """
        try:

            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.unicode_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

            if cache:
                md5 = file_node.md5
                is_binary = file_node.is_binary
                size = file_node.size
            else:
                is_binary, md5, size, _content = file_node.metadata_uncached()

            file_data.update({
                "md5": md5,
                "binary": is_binary,
                "size": size,
            })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_unicode(file_node.content)

                file_data.update({
                    "content": full_content,
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                if not is_binary and not over_size_limit:
                    full_content = safe_unicode(_content)

                file_data.update({
                    "content": full_content,
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data

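    # Illustrative sketch of the returned structure (not part of the original
    # changeset; all values below are assumed example data):
    #
    #   model.get_node('some-repo', 'tip', 'README.rst', extended_info=True)
    #   # -> {'name': 'README.rst', 'type': 'file', 'extension': 'rst',
    #   #     'mimetype': 'text/x-rst', 'md5': '...', 'binary': False,
    #   #     'size': 1234}
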
    def get_fts_data(self, repo_name, commit_id, root_path='/'):
        """
        Fetches the node tree for usage in full text search.
        """

        tree_info = list()

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):

                for f in files:
                    is_binary, md5, size, _content = f.metadata_uncached()
                    _data = {
                        "name": f.unicode_path,
                        "md5": md5,
                        "extension": f.extension,
                        "binary": is_binary,
                        "size": size
                    }

                    tree_info.append(_data)

        except RepositoryError:
            log.exception("Exception in get_fts_data")
            raise

        return tree_info

    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits the given multiple nodes into `repo`.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty, then it's
            the initial commit
        :param author: author of the commit, can be different than the
            committer, only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here ensures that we have properly encoded values,
            # in any other case this will throw exceptions and deny the commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        committer = user.full_contact
        author = safe_unicode(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip

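    # Illustrative usage sketch (not part of the original changeset): the
    # `nodes` mapping follows the {filename: {'content': content}} shape
    # documented above; `model`, `db_repo` and the user id are assumptions.
    #
    #   model.create_nodes(
    #       user=2, repo=db_repo, message=u'Add docs',
    #       nodes={
    #           'docs/index.rst': {'content': 'Welcome\n'},
    #           'docs/api.rst': {'content': 'API\n'},
    #       })
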
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        committer = user.full_contact
        author = safe_unicode(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one; also sanitize
            # the path against any tricks with relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficiently, needs vcs lib changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if the commit fails we want a fresh object too
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])

        return tip

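    # Illustrative usage sketch (not part of the original changeset): each
    # entry carries an 'op' of 'add', 'mod' or 'del'; a 'mod' with a changed
    # 'filename' is treated as a rename (remove + add). `model` and `db_repo`
    # are assumptions.
    #
    #   model.update_nodes(
    #       user=2, repo=db_repo, message=u'Rename and clean up',
    #       nodes={
    #           'old_name.txt': {'filename': 'new_name.txt',
    #                            'content': 'updated\n', 'op': 'mod'},
    #           'obsolete.txt': {'filename': 'obsolete.txt',
    #                            'content': '', 'op': 'del'},
    #       })
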
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes the given multiple nodes from `repo`.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty, then it's the
            initial commit
        :param author: author of the commit, can be different than the
            committer, only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty, but for compatibility it allows the same
            # dict structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        committer = user.full_contact
        author = safe_unicode(author) if author else committer

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty repository
            parents = None
        else:
            parents = [parent_commit]
        # remove multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip

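    # Illustrative usage sketch (not part of the original changeset):
    # deletion accepts the same mapping shape as create_nodes, with 'content'
    # being optional. `model` and `db_repo` are assumptions.
    #
    #   model.delete_nodes(
    #       user=2, repo=db_repo, message=u'Remove stale file',
    #       nodes={'docs/old.rst': {}})
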
    def strip(self, repo, commit_id, branch):
        scm_instance = repo.scm_instance(cache=False)
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select options with tags, branches and bookmarks (the
        latter for hg only), grouped by type.

        :param repo:
        """
        _ = translator
        repo = self._get_repo(repo)

        hist_l = [
            ['rev:tip', _('latest tip')]
        ]
        choices = [
            'rev:tip'
        ]

        if not repo:
            return choices, hist_l

        repo = repo.scm_instance()

        branches_group = (
            [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
             for b in repo.branches],
            _("Branches"))
        hist_l.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        if repo.alias == 'hg':
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            hist_l.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        hist_l.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, hist_l

    def get_server_info(self, environ=None):
        server_info = get_system_info(environ)
        return server_info
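    # Illustrative sketch of the returned structure (not part of the original
    # changeset; values are assumed): the first element is a flat list of
    # choices, the second the grouped options for a select widget.
    #
    #   choices, hist_l = model.get_repo_landing_revs(request.translate, db_repo)
    #   # choices -> ['rev:tip', u'branch:default', u'tag:v1.0', ...]
    #   # hist_l  -> [['rev:tip', u'latest tip'],
    #   #             ([(u'branch:default', u'default')], u'Branches'), ...]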
@@ -1,1902 +1,1902 b'' | |||||
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import collections
import datetime
import hashlib
import os
import re
import pprint
import shutil
import socket
import subprocess32
import time
import uuid
import dateutil.tz
import functools

import mock
import pyramid.testing
import pytest
import colander
import requests
import pyramid.paster

import rhodecode
from rhodecode.lib.utils2 import AttributeDict
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.comment import CommentsModel
from rhodecode.model.db import (
    PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
    UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
from rhodecode.model.meta import Session
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.model.repo import RepoModel
from rhodecode.model.repo_group import RepoGroupModel
from rhodecode.model.user import UserModel
from rhodecode.model.settings import VcsSettingsModel
from rhodecode.model.user_group import UserGroupModel
from rhodecode.model.integration import IntegrationModel
from rhodecode.integrations import integration_type_registry
from rhodecode.integrations.types.base import IntegrationTypeBase
from rhodecode.lib.utils import repo2db_mapper
from rhodecode.lib.vcs import create_vcsserver_proxy
from rhodecode.lib.vcs.backends import get_backend
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.tests import (
    login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
    TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
    TEST_USER_REGULAR_PASS)
from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
from rhodecode.tests.fixture import Fixture
from rhodecode.config import utils as config_utils

def _split_comma(value):
    return value.split(',')


def pytest_addoption(parser):
    parser.addoption(
        '--keep-tmp-path', action='store_true',
        help="Keep the test temporary directories")
    parser.addoption(
        '--backends', action='store', type=_split_comma,
        default=['git', 'hg', 'svn'],
        help="Select which backends to test for backend specific tests.")
    parser.addoption(
        '--dbs', action='store', type=_split_comma,
        default=['sqlite'],
        help="Select which database to test for database specific tests. "
             "Possible options are sqlite,postgres,mysql")
    parser.addoption(
        '--appenlight', '--ae', action='store_true',
        help="Track statistics in appenlight.")
    parser.addoption(
        '--appenlight-api-key', '--ae-key',
        help="API key for Appenlight.")
    parser.addoption(
        '--appenlight-url', '--ae-url',
        default="https://ae.rhodecode.com",
        help="Appenlight service URL, defaults to https://ae.rhodecode.com")
    parser.addoption(
        '--sqlite-connection-string', action='store',
        default='', help="Connection string for the dbs tests with SQLite")
    parser.addoption(
        '--postgres-connection-string', action='store',
        default='', help="Connection string for the dbs tests with Postgres")
    parser.addoption(
        '--mysql-connection-string', action='store',
        default='', help="Connection string for the dbs tests with MySQL")
    parser.addoption(
        '--repeat', type=int, default=100,
        help="Number of repetitions in performance tests.")


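# Illustrative invocation (not part of the original file): the options
# registered above combine on the pytest command line, e.g.:
#
#   pytest --backends=git,hg --dbs=sqlite --repeat=10 rhodecode/tests/
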
def pytest_configure(config):
    from rhodecode.config import patches


def pytest_collection_modifyitems(session, config, items):
    # drop items marked as not-a-test (compare nose's nottest); used during
    # the transition from nose to pytest
    remaining = [
        i for i in items if getattr(i.obj, '__test__', True)]
    items[:] = remaining


def pytest_generate_tests(metafunc):
    # Support test generation based on --backend parameter
    if 'backend_alias' in metafunc.fixturenames:
        backends = get_backends_from_metafunc(metafunc)
        scope = None
        if not backends:
            pytest.skip("Not enabled for any of selected backends")
        metafunc.parametrize('backend_alias', backends, scope=scope)
    elif hasattr(metafunc.function, 'backends'):
        backends = get_backends_from_metafunc(metafunc)
        if not backends:
            pytest.skip("Not enabled for any of selected backends")


def get_backends_from_metafunc(metafunc):
    requested_backends = set(metafunc.config.getoption('--backends'))
    if hasattr(metafunc.function, 'backends'):
        # Supported backends by this test function, created from
        # pytest.mark.backends
        backends = metafunc.definition.get_closest_marker('backends').args
    elif hasattr(metafunc.cls, 'backend_alias'):
        # Support class attribute "backend_alias", this is mainly
        # for legacy reasons for tests not yet using pytest.mark.backends
        backends = [metafunc.cls.backend_alias]
    else:
        backends = metafunc.config.getoption('--backends')
    return requested_backends.intersection(backends)


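# Illustrative sketch (not part of the original file): a test limited to
# specific backends via the marker that get_backends_from_metafunc consumes.
#
#   @pytest.mark.backends('git', 'hg')
#   def test_something(backend_alias):
#       assert backend_alias in ('git', 'hg')
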
@pytest.fixture(scope='session', autouse=True)
def activate_example_rcextensions(request):
    """
    Patch in an example rcextensions module which verifies passed in kwargs.
    """
    from rhodecode.config import rcextensions

    old_extensions = rhodecode.EXTENSIONS
    rhodecode.EXTENSIONS = rcextensions
    rhodecode.EXTENSIONS.calls = collections.defaultdict(list)

    @request.addfinalizer
    def cleanup():
        rhodecode.EXTENSIONS = old_extensions


@pytest.fixture
def capture_rcextensions():
    """
    Returns the recorded calls to entry points in rcextensions.
    """
    calls = rhodecode.EXTENSIONS.calls
    calls.clear()
    # Note: At this moment, it is still the empty dict, but that will
    # be filled during the test run and since it is a reference this
    # is enough to make it work.
    return calls


@pytest.fixture(scope='session')
def http_environ_session():
    """
    Allow to use "http_environ" in session scope.
    """
    return plain_http_environ()


def plain_http_host_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    return 'example.com:80'


@pytest.fixture
def http_host_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    return plain_http_host_stub()


def plain_http_host_only_stub():
    """
    Value of HTTP_HOST in the test run, without the port part.
    """
    return plain_http_host_stub().split(':')[0]


@pytest.fixture
def http_host_only_stub():
    """
    Value of HTTP_HOST in the test run, without the port part.
    """
    return plain_http_host_only_stub()


def plain_http_environ():
    """
    HTTP extra environ keys.

    Used by the test application, as well as for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    return {
        'SERVER_NAME': plain_http_host_only_stub(),
        'SERVER_PORT': plain_http_host_stub().split(':')[1],
        'HTTP_HOST': plain_http_host_stub(),
        'HTTP_USER_AGENT': 'rc-test-agent',
        'REQUEST_METHOD': 'GET'
    }


@pytest.fixture
def http_environ():
    """
    HTTP extra environ keys.

    Used by the test application, as well as for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    return plain_http_environ()


248 | @pytest.fixture(scope='session') |
|
248 | @pytest.fixture(scope='session') | |
249 | def baseapp(ini_config, vcsserver, http_environ_session): |
|
249 | def baseapp(ini_config, vcsserver, http_environ_session): | |
250 | from rhodecode.lib.pyramid_utils import get_app_config |
|
250 | from rhodecode.lib.pyramid_utils import get_app_config | |
251 | from rhodecode.config.middleware import make_pyramid_app |
|
251 | from rhodecode.config.middleware import make_pyramid_app | |
252 |
|
252 | |||
253 | print("Using the RhodeCode configuration:{}".format(ini_config)) |
|
253 | print("Using the RhodeCode configuration:{}".format(ini_config)) | |
254 | pyramid.paster.setup_logging(ini_config) |
|
254 | pyramid.paster.setup_logging(ini_config) | |
255 |
|
255 | |||
256 | settings = get_app_config(ini_config) |
|
256 | settings = get_app_config(ini_config) | |
257 | app = make_pyramid_app({'__file__': ini_config}, **settings) |
|
257 | app = make_pyramid_app({'__file__': ini_config}, **settings) | |
258 |
|
258 | |||
259 | return app |
|
259 | return app | |
260 |
|
260 | |||
261 |
|
261 | |||
@pytest.fixture(scope='function')
def app(request, config_stub, baseapp, http_environ):
    app = CustomTestApp(
        baseapp,
        extra_environ=http_environ)
    if request.cls:
        request.cls.app = app
    return app

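# Illustrative sketch of driving the "app" fixture in a functional test; the
# URL is an assumption for demonstration and any response status is accepted.
def _example_app_request(app):
    response = app.get('/', status='*')  # WebTest: '*' accepts any status
    return response.status_int
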
@pytest.fixture(scope='session')
def app_settings(baseapp, ini_config):
    """
    Settings dictionary used to create the app.

    Parses the ini file and passes the result through the sanitize and apply
    defaults mechanism in `rhodecode.config.middleware`.
    """
    return baseapp.config.get_settings()


@pytest.fixture(scope='session')
def db_connection(ini_settings):
    # Initialize the database connection.
    config_utils.initialize_database(ini_settings)


LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))


def _autologin_user(app, *args):
    session = login_user_session(app, *args)
    csrf_token = rhodecode.lib.auth.get_csrf_token(session)
    return LoginData(csrf_token, session['rhodecode_user'])


@pytest.fixture
def autologin_user(app):
    """
    Utility fixture which makes sure that the admin user is logged in
    """
    return _autologin_user(app)


@pytest.fixture
def autologin_regular_user(app):
    """
    Utility fixture which makes sure that the regular user is logged in
    """
    return _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)


@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    return autologin_user.csrf_token


@pytest.fixture(scope='function')
def xhr_header(request):
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}

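# Sketch of how tests typically combine the two fixtures above for a POST.
# The endpoint is hypothetical; RhodeCode views read the token from the
# "csrf_token" form parameter, and AJAX-only views require the XHR header.
def _example_csrf_post(app, csrf_token, xhr_header):
    return app.post(
        '/hypothetical/endpoint',
        params={'csrf_token': csrf_token},
        extra_environ=xhr_header,
        status='*')
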
@pytest.fixture
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    monkeypatch.setattr(rhodecode, 'is_test', False)


@pytest.fixture(scope='class')
def index_location(request, baseapp):
    index_location = baseapp.config.get_settings()['search.location']
    if request.cls:
        request.cls.index_location = index_location
    return index_location


@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create temporary directory to be used during the test session.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    if not request.config.getoption('--keep-tmp-path'):
        @request.addfinalizer
        def remove_tmp_path():
            shutil.rmtree(TESTS_TMP_PATH)

    return TESTS_TMP_PATH

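# Usage note (an assumption based on the option name above): running the
# suite with "--keep-tmp-path" preserves TESTS_TMP_PATH for inspection after
# a failing run, e.g.:
#
#     pytest --keep-tmp-path rhodecode/tests/
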
@pytest.fixture
def test_repo_group(request):
    """
    Create a temporary repository group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
    repo_group = fixture.create_repo_group(repogroupid)

    def _cleanup():
        fixture.destroy_repo_group(repogroupid)

    request.addfinalizer(_cleanup)
    return repo_group


@pytest.fixture
def test_user_group(request):
    """
    Create a temporary user group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
    user_group = fixture.create_user_group(usergroupid)

    def _cleanup():
        fixture.destroy_user_group(user_group)

    request.addfinalizer(_cleanup)
    return user_group

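# Sketch of consuming the two group fixtures; the model attribute names are
# assumptions in this illustration and the function is never collected.
def _example_group_fixtures(test_repo_group, test_user_group):
    assert test_repo_group.group_name.startswith('test_repo_group_')
    assert test_user_group.users_group_name.startswith('test_user_group_')
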
@pytest.fixture(scope='session')
def test_repo(request):
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container


class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestRepoContainer()
        repo = test_repo('minimal', 'svn')

    """

    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []
        self._fixture = Fixture()
        self._repos = {}

    def __call__(self, dump_name, backend_alias, config=None):
        key = (dump_name, backend_alias)
        if key not in self._repos:
            repo = self._create_repo(dump_name, backend_alias, config)
            self._repos[key] = repo.repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias, config):
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend = get_backend(backend_alias)
        dump_extractor = self.dump_extractors[backend_alias]
        repo_path = dump_extractor(dump_name, repo_name)

        vcs_repo = backend(repo_path, config=config)
        repo2db_mapper({repo_name: vcs_repo})

        repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return repo

    def _cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

def backend_base(request, backend_alias, baseapp, test_repo):
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    backend = Backend(
        alias=backend_alias,
        repo_name=repo_name,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend

@pytest.fixture
def backend(request, backend_alias, baseapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    return backend_base(request, backend_alias, baseapp, test_repo)

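# Sketch of the mark mentioned in the docstring above. The function is
# illustrative only (no "test_" prefix, so pytest does not collect it).
@pytest.mark.xfail_backends('svn')
def _example_backend_mark_usage(backend):
    repo = backend.create_repo(number_of_commits=1)
    assert repo is not None
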
@pytest.fixture
def backend_git(request, baseapp, test_repo):
    return backend_base(request, 'git', baseapp, test_repo)


@pytest.fixture
def backend_hg(request, baseapp, test_repo):
    return backend_base(request, 'hg', baseapp, test_repo)


@pytest.fixture
def backend_svn(request, baseapp, test_repo):
    return backend_base(request, 'svn', baseapp, test_repo)


@pytest.fixture
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend".

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git


@pytest.fixture
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub.

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git


@pytest.fixture
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub
    """
    return backend_stub.create_repo()

class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    _master_repo = None
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in range(number_of_commits)]
        vcs_repo = repo.scm_instance()
        vcs_repo.count()
        self._add_commits_to_repo(vcs_repo, commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsmaster = self._master_repo.scm_instance()
        vcsrepo = repo.scm_instance()
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)

    def create_fork(self):
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])

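# A minimal sketch of the Backend workflow: build a master repo from commit
# dicts, then derive repositories that pull selected heads from it. Commit
# messages double as keys into the returned commit map. Illustrative only.
def _example_backend_workflow(backend):
    commit_map = backend.create_master_repo([
        {'message': 'c1'},
        {'message': 'c2'},
    ])
    derived = backend.create_repo(heads=['c1'])
    backend.pull_heads(derived, heads=['c2'])
    return commit_map
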
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    repo_path = os.path.join(tests_tmp_path, repo_name)
    backend = VcsBackend(
        alias=backend_alias,
        repo_path=repo_path,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend

@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)


@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)


@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)


@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)


@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git

class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in xrange(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic <automatic@rhodecode.com>')

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)

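# Sketch: VcsBackend works purely on vcs level, so the repository created
# below has no database record attached. Illustrative only.
def _example_vcsbackend_usage(vcsbackend):
    repo = vcsbackend.create_repo(number_of_commits=2)
    assert len(repo.commit_ids) == 2
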
def _add_commits_to_repo(vcs_repo, commits):
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit
    commit = None

    for idx, commit in enumerate(commits):
        message = unicode(commit.get('message', 'Commit %s' % idx))

        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit.get('parents', [])]

        operations = ('added', 'changed', 'removed')
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode('file_%s' % idx, content=message))

        commit = imc.commit(
            message=message,
            author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
            date=commit.get('date'),
            branch=commit.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids

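# Sketch of the commit-dict schema consumed by _add_commits_to_repo; all keys
# are optional and the file contents here are illustrative only. Parent
# entries reference earlier commits by their message.
def _example_commits_schema():
    return [
        {'message': 'Add readme',
         'added': [FileNode('README', content='docs\n')]},
        {'message': 'Update readme',
         'changed': [FileNode('README', content='docs v2\n')],
         'parents': ['Add readme']},
    ]
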
@pytest.fixture
def reposerver(request):
    """
    Allows serving a backend repository
    """

    repo_server = RepoServer()
    request.addfinalizer(repo_server.cleanup)
    return repo_server

class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        for proc in self._cleanup_servers:
            proc.terminate()

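# Minimal sketch of serving an svn repository during a test; the checkout URL
# composition is an assumption based on svnserve's --root flag. Illustrative.
def _example_reposerver_usage(reposerver, vcsbackend_svn):
    repo = vcsbackend_svn.create_repo(number_of_commits=1)
    reposerver.serve(repo)
    return reposerver.url  # e.g. 'svn://localhost'
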
@pytest.fixture
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """

    util = PRTestUtility(backend)
    request.addfinalizer(util.cleanup)

    return util

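# Sketch: a typical pr_util flow creates one pull request per test and lets
# the fixture's finalizer clean everything up afterwards. Illustrative only.
def _example_pr_util_usage(pr_util):
    pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
    assert pull_request.pull_request_id == pr_util.pull_request_id
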
class PRTestUtility(object):

    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            assert model.get_versions(self.pull_request) == []

        self.pull_request_id = self.pull_request.pull_request_id

        if approved:
            self.approve()

        Session().add(self.pull_request)
        Session().commit()

        return self.pull_request

    def approve(self):
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
        ]

    def update_source_repository(self, head=None):
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)
|
1096 | pull_request=self.pull_request) | |
1097 |
|
1097 | |||
1098 | def set_mergeable(self, value): |
|
1098 | def set_mergeable(self, value): | |
1099 | if not self.mergeable_patcher: |
|
1099 | if not self.mergeable_patcher: | |
1100 | self.mergeable_patcher = mock.patch.object( |
|
1100 | self.mergeable_patcher = mock.patch.object( | |
1101 | VcsSettingsModel, 'get_general_settings') |
|
1101 | VcsSettingsModel, 'get_general_settings') | |
1102 | self.mergeable_mock = self.mergeable_patcher.start() |
|
1102 | self.mergeable_mock = self.mergeable_patcher.start() | |
1103 | self.mergeable_mock.return_value = { |
|
1103 | self.mergeable_mock.return_value = { | |
1104 | 'rhodecode_pr_merge_enabled': value} |
|
1104 | 'rhodecode_pr_merge_enabled': value} | |
1105 |
|
1105 | |||
1106 | def cleanup(self): |
|
1106 | def cleanup(self): | |
1107 | # In case the source repository is already cleaned up, the pull |
|
1107 | # In case the source repository is already cleaned up, the pull | |
1108 | # request will already be deleted. |
|
1108 | # request will already be deleted. | |
1109 | pull_request = PullRequest().get(self.pull_request_id) |
|
1109 | pull_request = PullRequest().get(self.pull_request_id) | |
1110 | if pull_request: |
|
1110 | if pull_request: | |
1111 | PullRequestModel().delete(pull_request, pull_request.author) |
|
1111 | PullRequestModel().delete(pull_request, pull_request.author) | |
1112 | Session().commit() |
|
1112 | Session().commit() | |
1113 |
|
1113 | |||
1114 | if self.notification_patcher: |
|
1114 | if self.notification_patcher: | |
1115 | self.notification_patcher.stop() |
|
1115 | self.notification_patcher.stop() | |
1116 |
|
1116 | |||
1117 | if self.mergeable_patcher: |
|
1117 | if self.mergeable_patcher: | |
1118 | self.mergeable_patcher.stop() |
|
1118 | self.mergeable_patcher.stop() | |
1119 |
|
1119 | |||
1120 |
|
1120 | |||
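
# Illustrative sketch (not in the original module): typical flow through the
# pull-request utility above. The fixture name `pr_util` and the test name
# are assumptions.
#
#     def test_pr_example(pr_util):
#         pull_request = pr_util.create_pull_request()
#         pr_util.add_one_commit()     # pulls head 'c3' and updates the PR
#         comment = pr_util.create_comment()
#         pr_util.close()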


@pytest.fixture
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
    return user


@pytest.fixture
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
    return user


@pytest.fixture
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    utility = UserUtility(test_name=request.node.name)
    request.addfinalizer(utility.cleanup)
    return utility
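
# Illustrative usage sketch (hypothetical test; names created by the utility
# are derived from the test name, and everything is destroyed again in the
# finalizer):
#
#     def test_example(user_util):
#         user = user_util.create_user()
#         repo = user_util.create_repo(owner=user.username)
#         user_util.grant_user_permission_to_repo(
#             repo, user, 'repository.write')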


# TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):

    def __init__(self, test_name="test"):
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg', bare=False):
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent,
            repo_type=repo_type, bare=bare)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_additional_user_email(self, user, email):
        uem = self.fixture.create_additional_user_email(user=user, email=email)
        return uem

    def create_user_with_group(self):
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                          auto_cleanup=True, **kwargs):
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(
            group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        self.inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        self.inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def inherit_default_user_permissions(self, user_name, value):
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)
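
    # Illustrative note (not in the original source): with cleanup registered
    # for the hypothetical groups 'a', 'a/b' and 'a/b/c', the comparator above
    # sorts them as ['a/b/c', 'a/b', 'a'], so nested groups are destroyed
    # before their parents.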

    def _cleanup_repos(self):
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)


# TODO: Think about moving this into a pytest-pyro package and make it a
# pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Add the remote traceback to the test report if the exception carries
    this information.

    VCSServer attaches this information as the attribute
    `_vcs_server_traceback` to the exception instance.
    """
    outcome = yield
    report = outcome.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(report, call.excinfo.value)


def _add_vcsserver_remote_traceback(report, exc):
    vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)

    if vcsserver_traceback:
        section = 'VCSServer remote traceback ' + report.when
        report.sections.append((section, vcsserver_traceback))
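
# Illustrative sketch (assumed values, not in the original source): any
# exception whose instance carries `_vcs_server_traceback` gains an extra
# report section.
#
#     exc = RuntimeError('remote failure')
#     exc._vcs_server_traceback = 'Traceback (most recent call last): ...'
#     # raised during the `call` phase -> the report gains a section named
#     # 'VCSServer remote traceback call' containing the remote traceback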


@pytest.fixture(scope='session')
def testrun():
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }


@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports the memory consumption of individual tests.

    It gathers data based on `psutil` and sends them to Appenlight. The
    option ``--appenlight`` has to be used to enable this fixture and the
    API key for your application has to be provided in
    ``--appenlight-api-key``.
    """
    try:
        # psutil is not yet supported on cygwin.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the baseapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        baseapp = request.getfuncargvalue("baseapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    server_and_port = baseapp.config.get_settings()['vcs.server']
    protocol = baseapp.config.get_settings()['vcs.server.protocol']
    server = create_vcsserver_proxy(server_and_port, protocol)
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

    test_process = psutil.Process()
    mem = test_process.memory_info()
    client.tag_before('test.rss', mem.rss)
    client.tag_before('test.vms', mem.vms)

    client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
            for tag, value in gc_stats.items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

        mem = test_process.memory_info()
        client.tag_after('test.rss', mem.rss)
        client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client
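
# Illustrative invocation (the URL, key and test path are placeholders):
#
#     py.test --appenlight --appenlight-api-key=<your-api-key> \
#         --appenlight-url=https://appenlight.example.com rhodecode/tests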


class AppenlightClient(object):

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        self.tags_after[tag] = value

    def collect(self, data):
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
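
# Illustrative sketch (placeholder URL/key, made-up numbers): values recorded
# via tag_before/tag_after are reported as `.before`, `.after` and `.delta`
# tags by `send_stats`.
#
#     client = AppenlightClient(
#         url='https://appenlight.example.com/api/logs', api_key='<key>',
#         testrun={'start': '2019-01-01T00:00:00', 'timestamp': 1546300800})
#     client.tag_before('test.rss', 1000)
#     client.tag_after('test.rss', 1500)
#     client.send_stats()  # tags include test.rss.before/.after/.delta=500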


@pytest.fixture
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    utility = GistUtility()
    request.addfinalizer(utility.cleanup)
    return utility


class GistUtility(object):
    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(gist.gist_id)
        return gist

    def cleanup(self):
        for id_ in self.gist_ids:
            self.fixture.destroy_gists(str(id_))
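
# Illustrative usage (hypothetical test): every gist created through the
# utility is destroyed again by the fixture finalizer.
#
#     def test_gist_example(gist_util):
#         gist = gist_util.create_gist()
#         assert gist.gist_id in gist_util.gist_ids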


@pytest.fixture
def enabled_backends(request):
    backends = request.config.option.backends
    return backends[:]


@pytest.fixture
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    utility = SettingsUtility()
    request.addfinalizer(utility.cleanup)
    return utility


class SettingsUtility(object):
    def __init__(self):
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
            setting = RepoRhodeCodeSetting.get(id_)
            Session().delete(setting)

        Session().commit()
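
# Illustrative usage (section/key/value strings are made up): rows created
# through the utility are deleted again in the fixture finalizer.
#
#     def test_settings_example(settings_util):
#         settings_util.create_rhodecode_ui('hooks', 'python:example.hook')
#         settings_util.create_rhodecode_setting(
#             'example_key', 'example_value', 'unicode')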


@pytest.fixture
def no_notifications(request):
    notification_patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    notification_patcher.start()
    request.addfinalizer(notification_patcher.stop)


@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. The value is chosen so that the
    tests are not too slow in our default test suite.
    """
    return request.config.getoption('--repeat')
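
# Illustrative usage (hypothetical tests and operations): scale the iteration
# count with the `--repeat` option, dividing it for slower operations.
#
#     def test_fast_operation(repeat):
#         for x in xrange(repeat):
#             fast_operation()
#
#     def test_slow_operation(repeat):
#         for x in xrange(repeat / 10):
#             slow_operation()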


@pytest.fixture
def rhodecode_fixtures():
    return Fixture()


@pytest.fixture
def context_stub():
    """
    Stub context object.
    """
    context = pyramid.testing.DummyResource()
    return context


@pytest.fixture
def request_stub():
    """
    Stub request object.
    """
    from rhodecode.lib.base import bootstrap_request
    request = bootstrap_request(scheme='https')
    return request


@pytest.fixture
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    from rhodecode.lib.base import bootstrap_config
    config = bootstrap_config(request=request_stub)

    @request.addfinalizer
    def cleanup():
        pyramid.testing.tearDown()

    return config
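
# Illustrative usage (the route name is made up): the returned Configurator
# behaves like in plain pyramid.testing based unit tests.
#
#     def test_routes_example(config_stub):
#         config_stub.add_route('example', '/example')
#         config_stub.commit()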


@pytest.fixture
def StubIntegrationType():
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            self.sent_events.append(event)

        def settings_schema(self):
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()

    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType


@pytest.fixture
def stub_integration_settings():
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }
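
# Illustrative check (sketch, not in the original source): the stub settings
# above satisfy the stub integration type's colander schema.
#
#     schema = _StubIntegrationType(settings={}).settings_schema()
#     schema.deserialize(
#         {'test_string_field': 'some data', 'test_int_field': 100})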


@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
                                         StubIntegrationType,
                                         stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    # Scoped to root-level repositories only: no repo or repo_group target,
    # with child_repos_only=True.
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test root repos integration',
        repo=None, repo_group=None, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration
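

# Illustrative sketch, not part of the original plugin: one way a test could
# consume the integration stub fixtures defined above. It assumes the created
# Integration objects expose `enabled`, `repo` and `repo_group` attributes,
# matching the keyword arguments passed to IntegrationModel().create().
def test_repo_integration_stub_example(repo_integration_stub, repo_stub):
    assert repo_integration_stub.enabled
    # Bound to a single repository, not to a group or the whole instance.
    assert repo_integration_stub.repo == repo_stub
    assert repo_integration_stub.repo_group is None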


@pytest.fixture
def local_dt_to_utc():
    def _factory(dt):
        return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
            dateutil.tz.tzutc()).replace(tzinfo=None)
    return _factory
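

# Illustrative sketch, not part of the original plugin: the fixture yields a
# converter, so tests call the fixture value like a function. A naive local
# datetime goes in; a naive UTC datetime comes out.
def test_local_dt_to_utc_example(local_dt_to_utc):
    import datetime
    utc_dt = local_dt_to_utc(datetime.datetime(2019, 1, 1, 12, 0, 0))
    # tzinfo is stripped again after the shift to UTC.
    assert utc_dt.tzinfo is None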


@pytest.fixture
def disable_anonymous_user(request, baseapp):
    set_anonymous_access(False)

    @request.addfinalizer
    def cleanup():
        set_anonymous_access(True)


@pytest.fixture(scope='module')
def rc_fixture(request):
    return Fixture()


@pytest.fixture
def repo_groups(request):
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')
    groups_in_db = session.query(RepoGroup).all()
    assert len(groups_in_db) == 3
    assert child_group.group_parent_id == parent_group.group_id

    @request.addfinalizer
    def cleanup():
        fixture.destroy_repo_group(zombie_group)
        fixture.destroy_repo_group(child_group)
        fixture.destroy_repo_group(parent_group)

    return zombie_group, parent_group, child_group
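

# Illustrative sketch, not part of the original plugin: `repo_groups` returns
# a (zombie, parent, child) tuple, so a test can unpack it directly. The
# parent/child assertion mirrors the one made inside the fixture; the check
# on the zombie group assumes root-level groups have no parent id set.
def test_repo_groups_example(repo_groups):
    zombie_group, parent_group, child_group = repo_groups
    assert child_group.group_parent_id == parent_group.group_id
    # 'zombie' is a root-level group with no parent.
    assert zombie_group.group_parent_id is None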
1891 | @pytest.fixture(scope="session") |
|
1891 | @pytest.fixture(scope="session") | |
1892 | def tmp_path_factory(request): |
|
1892 | def tmp_path_factory(request): | |
1893 | """Return a :class:`_pytest.tmpdir.TempPathFactory` instance for the test session. |
|
1893 | """Return a :class:`_pytest.tmpdir.TempPathFactory` instance for the test session. | |
1894 | """ |
|
1894 | """ | |
1895 |
|
1895 | |||
1896 | class TempPathFactory: |
|
1896 | class TempPathFactory: | |
1897 |
|
1897 | |||
1898 | def mktemp(self, basename): |
|
1898 | def mktemp(self, basename): | |
1899 | import tempfile |
|
1899 | import tempfile | |
1900 | return tempfile.mktemp(basename) |
|
1900 | return tempfile.mktemp(basename) | |
1901 |
|
1901 | |||
1902 | return TempPathFactory() |
|
1902 | return TempPathFactory() |
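

# Illustrative sketch, not part of the original plugin: because the stand-in
# above builds paths with tempfile.mktemp, nothing exists on disk until the
# caller creates it, unlike pytest's real TempPathFactory.
def test_tmp_path_factory_example(tmp_path_factory):
    import os
    import shutil
    path = tmp_path_factory.mktemp('example')
    assert not os.path.exists(path)  # mktemp only reserved a name
    os.makedirs(path)                # the caller must create the directory
    assert os.path.isdir(path)
    shutil.rmtree(path)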