@@ -0,0 +1,141 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Test suite for making push/pull operations, on specially modified INI files

.. important::

   You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
   to redirect things to stderr instead of stdout.
"""

import pytest
import requests

from rhodecode import events
from rhodecode.model.db import Integration
from rhodecode.model.integration import IntegrationModel
from rhodecode.model.meta import Session

from rhodecode.tests import GIT_REPO, HG_REPO
from rhodecode.tests.other.vcs_operations import Command, _add_files_and_push
from rhodecode.integrations.types.webhook import WebhookIntegrationType


def check_connection():
    try:
        response = requests.get('http://httpbin.org')
        return response.status_code == 200
    except Exception as e:
        print(e)

    return False


connection_available = pytest.mark.skipif(
    not check_connection(), reason="No outside internet connection available")


@pytest.fixture
def enable_webhook_push_integration(request):
    integration = Integration()
    integration.integration_type = WebhookIntegrationType.key
    Session().add(integration)

    settings = dict(
        url='http://httpbin.org',
        secret_token='secret',
        username=None,
        password=None,
        custom_header_key=None,
        custom_header_val=None,
        method_type='get',
        events=[events.RepoPushEvent.name],
        log_data=True
    )

    IntegrationModel().update_integration(
        integration,
        name='IntegrationWebhookTest',
        enabled=True,
        settings=settings,
        repo=None,
        repo_group=None,
        child_repos_only=False,
    )
    Session().commit()
    integration_id = integration.integration_id

    @request.addfinalizer
    def cleanup():
        integration = Integration.get(integration_id)
        Session().delete(integration)
        Session().commit()


@pytest.mark.usefixtures(
    "disable_locking", "disable_anonymous_user",
    "enable_webhook_push_integration")
class TestVCSOperationsOnCustomIniConfig(object):

    def test_push_tag_with_commit_hg(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO)
        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        push_url = rc_web_server.repo_clone_url(HG_REPO)
        _add_files_and_push(
            'hg', tmpdir.strpath, clone_url=push_url,
            tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])

        rc_log = rc_web_server.get_rc_log()
        assert 'ERROR' not in rc_log
        assert "'name': u'v1.0.0'" in rc_log

    def test_push_tag_with_commit_git(
            self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(GIT_REPO)
        stdout, stderr = Command('/tmp').execute(
            'git clone', clone_url, tmpdir.strpath)

        push_url = rc_web_server.repo_clone_url(GIT_REPO)
        _add_files_and_push(
            'git', tmpdir.strpath, clone_url=push_url,
            tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])

        rc_log = rc_web_server.get_rc_log()
        assert 'ERROR' not in rc_log
        assert "'name': u'v1.0.0'" in rc_log

    def test_push_tag_with_no_commit_git(
            self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(GIT_REPO)
        stdout, stderr = Command('/tmp').execute(
            'git clone', clone_url, tmpdir.strpath)

        push_url = rc_web_server.repo_clone_url(GIT_REPO)
        _add_files_and_push(
            'git', tmpdir.strpath, clone_url=push_url,
            tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])

        rc_log = rc_web_server.get_rc_log()
        assert 'ERROR' not in rc_log
        assert "'name': u'v1.0.0'" in rc_log
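
The `connection_available` marker defined in this new test module is declared but not applied to the tests shown above. As a minimal sketch (the test name, endpoint path and assertion are illustrative, not part of this changeset), a test that genuinely needs the outside httpbin.org endpoint could opt in like this:

    @connection_available
    def test_outside_connection_is_usable(self):
        # skipped automatically when check_connection() returned False
        response = requests.get('http://httpbin.org/status/200')
        assert response.status_code == 200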
@@ -1,283 +1,355 @@
# Copyright (C) 2016-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import collections
import logging
+import datetime

from rhodecode.translation import lazy_ugettext
from rhodecode.model.db import User, Repository, Session
from rhodecode.events.base import RhodecodeEvent
from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError

log = logging.getLogger(__name__)


def _commits_as_dict(event, commit_ids, repos):
    """
    Helper function to serialize commit_ids

    :param event: class calling this method
    :param commit_ids: commits to get
    :param repos: list of repos to check
    """
    from rhodecode.lib.utils2 import extract_mentioned_users
    from rhodecode.lib.helpers import (
        urlify_commit_message, process_patterns, chop_at_smart)
    from rhodecode.model.repo import RepoModel

    if not repos:
        raise Exception('no repo defined')

    if not isinstance(repos, (tuple, list)):
        repos = [repos]

    if not commit_ids:
        return []

    needed_commits = list(commit_ids)

    commits = []
    reviewers = []
    for repo in repos:
        if not needed_commits:
            return commits  # return early if we have the commits we need

        vcs_repo = repo.scm_instance(cache=False)
+
        try:
            # use copy of needed_commits since we modify it while iterating
            for commit_id in list(needed_commits):
-                try:
-                    cs = vcs_repo.get_changeset(commit_id)
-                except CommitDoesNotExistError:
-                    continue  # maybe its in next repo
-
-                cs_data = cs.__json__()
-                cs_data['refs'] = cs._get_refs()
+                if commit_id.startswith('tag=>'):
+                    raw_id = commit_id[5:]
+                    cs_data = {
+                        'raw_id': commit_id, 'short_id': commit_id,
+                        'branch': None,
+                        'git_ref_change': 'tag_add',
+                        'message': 'Added new tag {}'.format(raw_id),
+                        'author': event.actor.full_contact,
+                        'date': datetime.datetime.now(),
+                        'refs': {
+                            'branches': [],
+                            'bookmarks': [],
+                            'tags': []
+                        }
+                    }
+                    commits.append(cs_data)
+
+                elif commit_id.startswith('delete_branch=>'):
+                    raw_id = commit_id[15:]
+                    cs_data = {
+                        'raw_id': commit_id, 'short_id': commit_id,
+                        'branch': None,
+                        'git_ref_change': 'branch_delete',
+                        'message': 'Deleted branch {}'.format(raw_id),
+                        'author': event.actor.full_contact,
+                        'date': datetime.datetime.now(),
+                        'refs': {
+                            'branches': [],
+                            'bookmarks': [],
+                            'tags': []
+                        }
+                    }
+                    commits.append(cs_data)
+
+                else:
+                    try:
+                        cs = vcs_repo.get_changeset(commit_id)
+                    except CommitDoesNotExistError:
+                        continue  # maybe its in next repo
+
+                    cs_data = cs.__json__()
+                    cs_data['refs'] = cs._get_refs()
+
                cs_data['mentions'] = extract_mentioned_users(cs_data['message'])
                cs_data['reviewers'] = reviewers
                cs_data['url'] = RepoModel().get_commit_url(
                    repo, cs_data['raw_id'], request=event.request)
                cs_data['permalink_url'] = RepoModel().get_commit_url(
-                    repo, cs_data['raw_id'], request=event.request, permalink=True)
+                    repo, cs_data['raw_id'], request=event.request,
+                    permalink=True)
                urlified_message, issues_data = process_patterns(
                    cs_data['message'], repo.repo_name)
                cs_data['issues'] = issues_data
                cs_data['message_html'] = urlify_commit_message(
                    cs_data['message'], repo.repo_name)
                cs_data['message_html_title'] = chop_at_smart(
                    cs_data['message'], '\n', suffix_if_chopped='...')
                commits.append(cs_data)

                needed_commits.remove(commit_id)

-        except Exception as e:
-            log.exception(e)
+        except Exception:
+            log.exception('Failed to extract commits data')
            # we don't send any commits when crash happens, only full list
            # matters we short circuit then.
            return []

    missing_commits = set(commit_ids) - set(c['raw_id'] for c in commits)
    if missing_commits:
        log.error('missing commits: %s' % ', '.join(missing_commits))

    return commits


def _issues_as_dict(commits):
    """ Helper function to serialize issues from commits """
    issues = {}
    for commit in commits:
        for issue in commit['issues']:
            issues[issue['id']] = issue
    return issues


class RepoEvent(RhodecodeEvent):
    """
    Base class for events acting on a repository.

    :param repo: a :class:`Repository` instance
    """

    def __init__(self, repo):
        super(RepoEvent, self).__init__()
        self.repo = repo

    def as_dict(self):
        from rhodecode.model.repo import RepoModel
        data = super(RepoEvent, self).as_dict()

        extra_fields = collections.OrderedDict()
        for field in self.repo.extra_fields:
            extra_fields[field.field_key] = field.field_value

        data.update({
            'repo': {
                'repo_id': self.repo.repo_id,
                'repo_name': self.repo.repo_name,
                'repo_type': self.repo.repo_type,
                'url': RepoModel().get_url(
                    self.repo, request=self.request),
                'permalink_url': RepoModel().get_url(
                    self.repo, request=self.request, permalink=True),
                'extra_fields': extra_fields
            }
        })
        return data


class RepoPreCreateEvent(RepoEvent):
    """
    An instance of this class is emitted as an :term:`event` before a repo is
    created.
    """
    name = 'repo-pre-create'
    display_name = lazy_ugettext('repository pre create')


class RepoCreateEvent(RepoEvent):
    """
    An instance of this class is emitted as an :term:`event` whenever a repo is
    created.
    """
    name = 'repo-create'
    display_name = lazy_ugettext('repository created')


class RepoPreDeleteEvent(RepoEvent):
    """
    An instance of this class is emitted as an :term:`event` whenever a repo is
    created.
    """
    name = 'repo-pre-delete'
    display_name = lazy_ugettext('repository pre delete')


class RepoDeleteEvent(RepoEvent):
    """
    An instance of this class is emitted as an :term:`event` whenever a repo is
    created.
    """
    name = 'repo-delete'
    display_name = lazy_ugettext('repository deleted')


class RepoVCSEvent(RepoEvent):
    """
    Base class for events triggered by the VCS
    """
    def __init__(self, repo_name, extras):
        self.repo = Repository.get_by_repo_name(repo_name)
        if not self.repo:
            raise Exception('repo by this name %s does not exist' % repo_name)
        self.extras = extras
        super(RepoVCSEvent, self).__init__(self.repo)

    @property
    def actor(self):
        if self.extras.get('username'):
            return User.get_by_username(self.extras['username'])

    @property
    def actor_ip(self):
        if self.extras.get('ip'):
            return self.extras['ip']

    @property
    def server_url(self):
        if self.extras.get('server_url'):
            return self.extras['server_url']

    @property
    def request(self):
        return self.extras.get('request') or self.get_request()


class RepoPrePullEvent(RepoVCSEvent):
    """
    An instance of this class is emitted as an :term:`event` before commits
    are pulled from a repo.
    """
    name = 'repo-pre-pull'
    display_name = lazy_ugettext('repository pre pull')


class RepoPullEvent(RepoVCSEvent):
    """
    An instance of this class is emitted as an :term:`event` after commits
    are pulled from a repo.
    """
    name = 'repo-pull'
    display_name = lazy_ugettext('repository pull')


class RepoPrePushEvent(RepoVCSEvent):
    """
    An instance of this class is emitted as an :term:`event` before commits
    are pushed to a repo.
    """
    name = 'repo-pre-push'
    display_name = lazy_ugettext('repository pre push')


class RepoPushEvent(RepoVCSEvent):
    """
    An instance of this class is emitted as an :term:`event` after commits
    are pushed to a repo.

    :param extras: (optional) dict of data from proxied VCS actions
    """
    name = 'repo-push'
    display_name = lazy_ugettext('repository push')

    def __init__(self, repo_name, pushed_commit_ids, extras):
        super(RepoPushEvent, self).__init__(repo_name, extras)
        self.pushed_commit_ids = pushed_commit_ids
+        self.new_refs = extras.new_refs

    def as_dict(self):
        data = super(RepoPushEvent, self).as_dict()

        def branch_url(branch_name):
            return '{}/changelog?branch={}'.format(
                data['repo']['url'], branch_name)

+        def tag_url(tag_name):
+            return '{}/files/{}/'.format(
+                data['repo']['url'], tag_name)
+
        commits = _commits_as_dict(
            self, commit_ids=self.pushed_commit_ids, repos=[self.repo])

        last_branch = None
        for commit in reversed(commits):
            commit['branch'] = commit['branch'] or last_branch
            last_branch = commit['branch']
        issues = _issues_as_dict(commits)

-        branches = set(
-            commit['branch'] for commit in commits if commit['branch'])
+        branches = set()
+        tags = set()
+        for commit in commits:
+            if commit['refs']['tags']:
+                for tag in commit['refs']['tags']:
+                    tags.add(tag)
+            if commit['branch']:
+                branches.add(commit['branch'])
+
+        # maybe we have branches in new_refs ?
+        try:
+            branches = branches.union(set(self.new_refs['branches']))
+        except Exception:
+            pass
+
        branches = [
            {
                'name': branch,
                'url': branch_url(branch)
            }
            for branch in branches
        ]

+        # maybe we have branches in new_refs ?
+        try:
+            tags = tags.union(set(self.new_refs['tags']))
+        except Exception:
+            pass
+
+        tags = [
+            {
+                'name': tag,
+                'url': tag_url(tag)
+            }
+            for tag in tags
+        ]
+
        data['push'] = {
            'commits': commits,
            'issues': issues,
            'branches': branches,
+            'tags': tags,
        }
        return data
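
The `tag=>` and `delete_branch=>` branches added to `_commits_as_dict` above expect the hook layer to mix pseudo commit ids in with real hashes in `pushed_commit_ids`. A minimal sketch of that convention (the hash, repo name and `extras` object are made-up placeholders, not values from this changeset):

    # ordinary ids resolve through vcs_repo.get_changeset(); the prefixed
    # markers produce synthetic entries with 'git_ref_change' set to
    # 'tag_add' or 'branch_delete' instead
    pushed_commit_ids = [
        'aa3187a5a95b4d7ba46acd55b1df61fe5b5e0e8b',  # real commit (made up)
        'tag=>v1.0.0',                # commit_id[5:]  -> raw_id 'v1.0.0'
        'delete_branch=>feature-x',   # commit_id[15:] -> raw_id 'feature-x'
    ]
    event = RepoPushEvent(
        repo_name='hg-repo', pushed_commit_ids=pushed_commit_ids, extras=extras)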
@@ -1,388 +1,394 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | from __future__ import unicode_literals |
|
21 | from __future__ import unicode_literals | |
22 | import string |
|
22 | import string | |
23 | from collections import OrderedDict |
|
23 | from collections import OrderedDict | |
24 |
|
24 | |||
25 | import deform |
|
25 | import deform | |
26 | import deform.widget |
|
26 | import deform.widget | |
27 | import logging |
|
27 | import logging | |
28 | import requests |
|
28 | import requests | |
29 | import requests.adapters |
|
29 | import requests.adapters | |
30 | import colander |
|
30 | import colander | |
31 | from requests.packages.urllib3.util.retry import Retry |
|
31 | from requests.packages.urllib3.util.retry import Retry | |
32 |
|
32 | |||
33 | import rhodecode |
|
33 | import rhodecode | |
34 | from rhodecode import events |
|
34 | from rhodecode import events | |
35 | from rhodecode.translation import _ |
|
35 | from rhodecode.translation import _ | |
36 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
36 | from rhodecode.integrations.types.base import IntegrationTypeBase | |
37 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask |
|
37 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask | |
38 |
|
38 | |||
39 | log = logging.getLogger(__name__) |
|
39 | log = logging.getLogger(__name__) | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | # updating this required to update the `common_vars` passed in url calling func |
|
42 | # updating this required to update the `common_vars` passed in url calling func | |
43 | WEBHOOK_URL_VARS = [ |
|
43 | WEBHOOK_URL_VARS = [ | |
44 | 'repo_name', |
|
44 | 'repo_name', | |
45 | 'repo_type', |
|
45 | 'repo_type', | |
46 | 'repo_id', |
|
46 | 'repo_id', | |
47 | 'repo_url', |
|
47 | 'repo_url', | |
48 | # extra repo fields |
|
48 | # extra repo fields | |
49 | 'extra:<extra_key_name>', |
|
49 | 'extra:<extra_key_name>', | |
50 |
|
50 | |||
51 | # special attrs below that we handle, using multi-call |
|
51 | # special attrs below that we handle, using multi-call | |
52 | 'branch', |
|
52 | 'branch', | |
53 | 'commit_id', |
|
53 | 'commit_id', | |
54 |
|
54 | |||
55 | # pr events vars |
|
55 | # pr events vars | |
56 | 'pull_request_id', |
|
56 | 'pull_request_id', | |
57 | 'pull_request_url', |
|
57 | 'pull_request_url', | |
58 |
|
58 | |||
59 | # user who triggers the call |
|
59 | # user who triggers the call | |
60 | 'username', |
|
60 | 'username', | |
61 | 'user_id', |
|
61 | 'user_id', | |
62 |
|
62 | |||
63 | ] |
|
63 | ] | |
64 | URL_VARS = ', '.join('${' + x + '}' for x in WEBHOOK_URL_VARS) |
|
64 | URL_VARS = ', '.join('${' + x + '}' for x in WEBHOOK_URL_VARS) | |
65 |
|
65 | |||
66 |
|
66 | |||
67 | def get_auth(settings): |
|
67 | def get_auth(settings): | |
68 | from requests.auth import HTTPBasicAuth |
|
68 | from requests.auth import HTTPBasicAuth | |
69 | username = settings.get('username') |
|
69 | username = settings.get('username') | |
70 | password = settings.get('password') |
|
70 | password = settings.get('password') | |
71 | if username and password: |
|
71 | if username and password: | |
72 | return HTTPBasicAuth(username, password) |
|
72 | return HTTPBasicAuth(username, password) | |
73 | return None |
|
73 | return None | |
74 |
|
74 | |||
75 |
|
75 | |||
76 | class WebhookHandler(object): |
|
76 | class WebhookHandler(object): | |
77 | def __init__(self, template_url, secret_token, headers): |
|
77 | def __init__(self, template_url, secret_token, headers): | |
78 | self.template_url = template_url |
|
78 | self.template_url = template_url | |
79 | self.secret_token = secret_token |
|
79 | self.secret_token = secret_token | |
80 | self.headers = headers |
|
80 | self.headers = headers | |
81 |
|
81 | |||
82 | def get_base_parsed_template(self, data): |
|
82 | def get_base_parsed_template(self, data): | |
83 | """ |
|
83 | """ | |
84 | initially parses the passed in template with some common variables |
|
84 | initially parses the passed in template with some common variables | |
85 | available on ALL calls |
|
85 | available on ALL calls | |
86 | """ |
|
86 | """ | |
87 | # note: make sure to update the `WEBHOOK_URL_VARS` if this changes |
|
87 | # note: make sure to update the `WEBHOOK_URL_VARS` if this changes | |
88 | common_vars = { |
|
88 | common_vars = { | |
89 | 'repo_name': data['repo']['repo_name'], |
|
89 | 'repo_name': data['repo']['repo_name'], | |
90 | 'repo_type': data['repo']['repo_type'], |
|
90 | 'repo_type': data['repo']['repo_type'], | |
91 | 'repo_id': data['repo']['repo_id'], |
|
91 | 'repo_id': data['repo']['repo_id'], | |
92 | 'repo_url': data['repo']['url'], |
|
92 | 'repo_url': data['repo']['url'], | |
93 | 'username': data['actor']['username'], |
|
93 | 'username': data['actor']['username'], | |
94 | 'user_id': data['actor']['user_id'] |
|
94 | 'user_id': data['actor']['user_id'] | |
95 | } |
|
95 | } | |
96 | extra_vars = {} |
|
96 | extra_vars = {} | |
97 | for extra_key, extra_val in data['repo']['extra_fields'].items(): |
|
97 | for extra_key, extra_val in data['repo']['extra_fields'].items(): | |
98 | extra_vars['extra:{}'.format(extra_key)] = extra_val |
|
98 | extra_vars['extra:{}'.format(extra_key)] = extra_val | |
99 | common_vars.update(extra_vars) |
|
99 | common_vars.update(extra_vars) | |
100 |
|
100 | |||
101 | return string.Template( |
|
101 | return string.Template( | |
102 | self.template_url).safe_substitute(**common_vars) |
|
102 | self.template_url).safe_substitute(**common_vars) | |
103 |
|
103 | |||
104 | def repo_push_event_handler(self, event, data): |
|
104 | def repo_push_event_handler(self, event, data): | |
105 | url = self.get_base_parsed_template(data) |
|
105 | url = self.get_base_parsed_template(data) | |
106 | url_cals = [] |
|
106 | url_cals = [] | |
107 | branch_data = OrderedDict() |
|
107 | branch_data = OrderedDict() | |
108 | for obj in data['push']['branches']: |
|
108 | for obj in data['push']['branches']: | |
109 | branch_data[obj['name']] = obj |
|
109 | branch_data[obj['name']] = obj | |
110 |
|
110 | |||
111 | branches_commits = OrderedDict() |
|
111 | branches_commits = OrderedDict() | |
112 | for commit in data['push']['commits']: |
|
112 | for commit in data['push']['commits']: | |
|
113 | if commit.get('git_ref_change'): | |||
|
114 | # special case for GIT that allows creating tags, | |||
|
115 | # deleting branches without associated commit | |||
|
116 | continue | |||
|
117 | ||||
113 | if commit['branch'] not in branches_commits: |
|
118 | if commit['branch'] not in branches_commits: | |
114 | branch_commits = {'branch': branch_data[commit['branch']], |
|
119 | branch_commits = {'branch': branch_data[commit['branch']], | |
115 | 'commits': []} |
|
120 | 'commits': []} | |
116 | branches_commits[commit['branch']] = branch_commits |
|
121 | branches_commits[commit['branch']] = branch_commits | |
117 |
|
122 | |||
118 | branch_commits = branches_commits[commit['branch']] |
|
123 | branch_commits = branches_commits[commit['branch']] | |
119 | branch_commits['commits'].append(commit) |
|
124 | branch_commits['commits'].append(commit) | |
120 |
|
125 | |||
121 | if '${branch}' in url: |
|
126 | if '${branch}' in url: | |
122 | # call it multiple times, for each branch if used in variables |
|
127 | # call it multiple times, for each branch if used in variables | |
123 | for branch, commit_ids in branches_commits.items(): |
|
128 | for branch, commit_ids in branches_commits.items(): | |
124 | branch_url = string.Template(url).safe_substitute(branch=branch) |
|
129 | branch_url = string.Template(url).safe_substitute(branch=branch) | |
125 | # call further down for each commit if used |
|
130 | # call further down for each commit if used | |
126 | if '${commit_id}' in branch_url: |
|
131 | if '${commit_id}' in branch_url: | |
127 | for commit_data in commit_ids['commits']: |
|
132 | for commit_data in commit_ids['commits']: | |
128 | commit_id = commit_data['raw_id'] |
|
133 | commit_id = commit_data['raw_id'] | |
129 | commit_url = string.Template(branch_url).safe_substitute( |
|
134 | commit_url = string.Template(branch_url).safe_substitute( | |
130 | commit_id=commit_id) |
|
135 | commit_id=commit_id) | |
131 | # register per-commit call |
|
136 | # register per-commit call | |
132 | log.debug( |
|
137 | log.debug( | |
133 | 'register webhook call(%s) to url %s', event, commit_url) |
|
138 | 'register webhook call(%s) to url %s', event, commit_url) | |
134 | url_cals.append((commit_url, self.secret_token, self.headers, data)) |
|
139 | url_cals.append((commit_url, self.secret_token, self.headers, data)) | |
135 |
|
140 | |||
136 | else: |
|
141 | else: | |
137 | # register per-branch call |
|
142 | # register per-branch call | |
138 | log.debug( |
|
143 | log.debug( | |
139 | 'register webhook call(%s) to url %s', event, branch_url) |
|
144 | 'register webhook call(%s) to url %s', event, branch_url) | |
140 | url_cals.append((branch_url, self.secret_token, self.headers, data)) |
|
145 | url_cals.append((branch_url, self.secret_token, self.headers, data)) | |
141 |
|
146 | |||
142 | else: |
|
147 | else: | |
143 | log.debug( |
|
148 | log.debug( | |
144 | 'register webhook call(%s) to url %s', event, url) |
|
149 | 'register webhook call(%s) to url %s', event, url) | |
145 | url_cals.append((url, self.secret_token, self.headers, data)) |
|
150 | url_cals.append((url, self.secret_token, self.headers, data)) | |
146 |
|
151 | |||
147 | return url_cals |
|
152 | return url_cals | |
148 |
|
153 | |||
149 | def repo_create_event_handler(self, event, data): |
|
154 | def repo_create_event_handler(self, event, data): | |
150 | url = self.get_base_parsed_template(data) |
|
155 | url = self.get_base_parsed_template(data) | |
151 | log.debug( |
|
156 | log.debug( | |
152 | 'register webhook call(%s) to url %s', event, url) |
|
157 | 'register webhook call(%s) to url %s', event, url) | |
153 | return [(url, self.secret_token, self.headers, data)] |
|
158 | return [(url, self.secret_token, self.headers, data)] | |
154 |
|
159 | |||
155 | def pull_request_event_handler(self, event, data): |
|
160 | def pull_request_event_handler(self, event, data): | |
156 | url = self.get_base_parsed_template(data) |
|
161 | url = self.get_base_parsed_template(data) | |
157 | log.debug( |
|
162 | log.debug( | |
158 | 'register webhook call(%s) to url %s', event, url) |
|
163 | 'register webhook call(%s) to url %s', event, url) | |
159 | url = string.Template(url).safe_substitute( |
|
164 | url = string.Template(url).safe_substitute( | |
160 | pull_request_id=data['pullrequest']['pull_request_id'], |
|
165 | pull_request_id=data['pullrequest']['pull_request_id'], | |
161 | pull_request_url=data['pullrequest']['url']) |
|
166 | pull_request_url=data['pullrequest']['url']) | |
162 | return [(url, self.secret_token, self.headers, data)] |
|
167 | return [(url, self.secret_token, self.headers, data)] | |
163 |
|
168 | |||
164 | def __call__(self, event, data): |
|
169 | def __call__(self, event, data): | |
165 | if isinstance(event, events.RepoPushEvent): |
|
170 | if isinstance(event, events.RepoPushEvent): | |
166 | return self.repo_push_event_handler(event, data) |
|
171 | return self.repo_push_event_handler(event, data) | |
167 | elif isinstance(event, events.RepoCreateEvent): |
|
172 | elif isinstance(event, events.RepoCreateEvent): | |
168 | return self.repo_create_event_handler(event, data) |
|
173 | return self.repo_create_event_handler(event, data) | |
169 | elif isinstance(event, events.PullRequestEvent): |
|
174 | elif isinstance(event, events.PullRequestEvent): | |
170 | return self.pull_request_event_handler(event, data) |
|
175 | return self.pull_request_event_handler(event, data) | |
171 | else: |
|
176 | else: | |
172 | raise ValueError('event type not supported: %s' % events) |
|
177 | raise ValueError('event type not supported: %s' % events) | |
173 |
|
178 | |||
174 |
|
179 | |||
175 | class WebhookSettingsSchema(colander.Schema): |
|
180 | class WebhookSettingsSchema(colander.Schema): | |
176 | url = colander.SchemaNode( |
|
181 | url = colander.SchemaNode( | |
177 | colander.String(), |
|
182 | colander.String(), | |
178 | title=_('Webhook URL'), |
|
183 | title=_('Webhook URL'), | |
179 | description= |
|
184 | description= | |
180 | _('URL to which Webhook should submit data. Following variables ' |
|
185 | _('URL to which Webhook should submit data. Following variables ' | |
181 | 'are allowed to be used: {vars}. Some of the variables would ' |
|
186 | 'are allowed to be used: {vars}. Some of the variables would ' | |
182 | 'trigger multiple calls, like ${{branch}} or ${{commit_id}}. ' |
|
187 | 'trigger multiple calls, like ${{branch}} or ${{commit_id}}. ' | |
183 | 'Webhook will be called as many times as unique objects in ' |
|
188 | 'Webhook will be called as many times as unique objects in ' | |
184 | 'data in such cases.').format(vars=URL_VARS), |
|
189 | 'data in such cases.').format(vars=URL_VARS), | |
185 | missing=colander.required, |
|
190 | missing=colander.required, | |
186 | required=True, |
|
191 | required=True, | |
187 | validator=colander.url, |
|
192 | validator=colander.url, | |
188 | widget=deform.widget.TextInputWidget( |
|
193 | widget=deform.widget.TextInputWidget( | |
189 | placeholder='https://www.example.com/webhook' |
|
194 | placeholder='https://www.example.com/webhook' | |
190 | ), |
|
195 | ), | |
191 | ) |
|
196 | ) | |
192 | secret_token = colander.SchemaNode( |
|
197 | secret_token = colander.SchemaNode( | |
193 | colander.String(), |
|
198 | colander.String(), | |
194 | title=_('Secret Token'), |
|
199 | title=_('Secret Token'), | |
195 | description=_('Optional string used to validate received payloads. ' |
|
200 | description=_('Optional string used to validate received payloads. ' | |
196 | 'It will be sent together with event data in JSON'), |
|
201 | 'It will be sent together with event data in JSON'), | |
197 | default='', |
|
202 | default='', | |
198 | missing='', |
|
203 | missing='', | |
199 | widget=deform.widget.TextInputWidget( |
|
204 | widget=deform.widget.TextInputWidget( | |
200 | placeholder='e.g. secret_token' |
|
205 | placeholder='e.g. secret_token' | |
201 | ), |
|
206 | ), | |
202 | ) |
|
207 | ) | |
203 | username = colander.SchemaNode( |
|
208 | username = colander.SchemaNode( | |
204 | colander.String(), |
|
209 | colander.String(), | |
205 | title=_('Username'), |
|
210 | title=_('Username'), | |
206 | description=_('Optional username to authenticate the call.'), |
|
211 | description=_('Optional username to authenticate the call.'), | |
207 | default='', |
|
212 | default='', | |
208 | missing='', |
|
213 | missing='', | |
209 | widget=deform.widget.TextInputWidget( |
|
214 | widget=deform.widget.TextInputWidget( | |
210 | placeholder='e.g. admin' |
|
215 | placeholder='e.g. admin' | |
211 | ), |
|
216 | ), | |
212 | ) |
|
217 | ) | |
213 | password = colander.SchemaNode( |
|
218 | password = colander.SchemaNode( | |
214 | colander.String(), |
|
219 | colander.String(), | |
215 | title=_('Password'), |
|
220 | title=_('Password'), | |
216 | description=_('Optional password to authenticate the call.'), |
|
221 | description=_('Optional password to authenticate the call.'), | |
217 | default='', |
|
222 | default='', | |
218 | missing='', |
|
223 | missing='', | |
219 | widget=deform.widget.PasswordWidget( |
|
224 | widget=deform.widget.PasswordWidget( | |
220 | placeholder='e.g. secret.', |
|
225 | placeholder='e.g. secret.', | |
221 | redisplay=True, |
|
226 | redisplay=True, | |
222 | ), |
|
227 | ), | |
223 | ) |
|
228 | ) | |
224 | custom_header_key = colander.SchemaNode( |
|
229 | custom_header_key = colander.SchemaNode( | |
225 | colander.String(), |
|
230 | colander.String(), | |
226 | title=_('Custom Header Key'), |
|
231 | title=_('Custom Header Key'), | |
227 | description=_('Custom Header name to be set when calling endpoint.'), |
|
232 | description=_('Custom Header name to be set when calling endpoint.'), | |
228 | default='', |
|
233 | default='', | |
229 | missing='', |
|
234 | missing='', | |
230 | widget=deform.widget.TextInputWidget( |
|
235 | widget=deform.widget.TextInputWidget( | |
231 | placeholder='e.g.Authorization' |
|
236 | placeholder='e.g.Authorization' | |
232 | ), |
|
237 | ), | |
233 | ) |
|
238 | ) | |
234 | custom_header_val = colander.SchemaNode( |
|
239 | custom_header_val = colander.SchemaNode( | |
235 | colander.String(), |
|
240 | colander.String(), | |
236 | title=_('Custom Header Value'), |
|
241 | title=_('Custom Header Value'), | |
237 | description=_('Custom Header value to be set when calling endpoint.'), |
|
242 | description=_('Custom Header value to be set when calling endpoint.'), | |
238 | default='', |
|
243 | default='', | |
239 | missing='', |
|
244 | missing='', | |
240 | widget=deform.widget.TextInputWidget( |
|
245 | widget=deform.widget.TextInputWidget( | |
241 | placeholder='e.g. RcLogin auth=xxxx' |
|
246 | placeholder='e.g. RcLogin auth=xxxx' | |
242 | ), |
|
247 | ), | |
243 | ) |
|
248 | ) | |
244 | method_type = colander.SchemaNode( |
|
249 | method_type = colander.SchemaNode( | |
245 | colander.String(), |
|
250 | colander.String(), | |
246 | title=_('Call Method'), |
|
251 | title=_('Call Method'), | |
247 | description=_('Select if the Webhook call should be made ' |
|
252 | description=_('Select if the Webhook call should be made ' | |
248 | 'with POST or GET.'), |
|
253 | 'with POST or GET.'), | |
249 | default='post', |
|
254 | default='post', | |
250 | missing='', |
|
255 | missing='', | |
251 | widget=deform.widget.RadioChoiceWidget( |
|
256 | widget=deform.widget.RadioChoiceWidget( | |
252 | values=[('get', 'GET'), ('post', 'POST')], |
|
257 | values=[('get', 'GET'), ('post', 'POST')], | |
253 | inline=True |
|
258 | inline=True | |
254 | ), |
|
259 | ), | |
255 | ) |
|
260 | ) | |
256 |
|
261 | |||
257 |
|
262 | |||
258 | class WebhookIntegrationType(IntegrationTypeBase): |
|
263 | class WebhookIntegrationType(IntegrationTypeBase): | |
259 | key = 'webhook' |
|
264 | key = 'webhook' | |
260 | display_name = _('Webhook') |
|
265 | display_name = _('Webhook') | |
261 | description = _('Post json events to a Webhook endpoint') |
|
266 | description = _('Post json events to a Webhook endpoint') | |
262 | icon = '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 136.210101,99.2295848 C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>''' |
|
267 | icon = '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 136.210101,99.2295848 C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>''' | |
263 |
|
268 | |||
264 | valid_events = [ |
|
269 | valid_events = [ | |
265 | events.PullRequestCloseEvent, |
|
270 | events.PullRequestCloseEvent, | |
266 | events.PullRequestMergeEvent, |
|
271 | events.PullRequestMergeEvent, | |
267 | events.PullRequestUpdateEvent, |
|
272 | events.PullRequestUpdateEvent, | |
268 | events.PullRequestCommentEvent, |
|
273 | events.PullRequestCommentEvent, | |
269 | events.PullRequestReviewEvent, |
|
274 | events.PullRequestReviewEvent, | |
270 | events.PullRequestCreateEvent, |
|
275 | events.PullRequestCreateEvent, | |
271 | events.RepoPushEvent, |
|
276 | events.RepoPushEvent, | |
272 | events.RepoCreateEvent, |
|
277 | events.RepoCreateEvent, | |
273 | ] |
|
278 | ] | |
274 |
|
279 | |||
275 | def settings_schema(self): |
|
280 | def settings_schema(self): | |
276 | schema = WebhookSettingsSchema() |
|
281 | schema = WebhookSettingsSchema() | |
277 | schema.add(colander.SchemaNode( |
|
282 | schema.add(colander.SchemaNode( | |
278 | colander.Set(), |
|
283 | colander.Set(), | |
279 | widget=deform.widget.CheckboxChoiceWidget( |
|
284 | widget=deform.widget.CheckboxChoiceWidget( | |
280 | values=sorted( |
|
285 | values=sorted( | |
281 | [(e.name, e.display_name) for e in self.valid_events] |
|
286 | [(e.name, e.display_name) for e in self.valid_events] | |
282 | ) |
|
287 | ) | |
283 | ), |
|
288 | ), | |
284 | description="Events activated for this integration", |
|
289 | description="Events activated for this integration", | |
285 | name='events' |
|
290 | name='events' | |
286 | )) |
|
291 | )) | |
287 | return schema |
|
292 | return schema | |
288 |
|
293 | |||
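A minimal, self-contained sketch of the pattern used in settings_schema() above: a colander Set node rendered as a checkbox group and deserialized back into a set of event names. _EventsSchema and the single choice are hypothetical stand-ins for the real WebhookSettingsSchema and event list.

    import colander
    import deform

    class _EventsSchema(colander.MappingSchema):
        pass

    schema = _EventsSchema()
    schema.add(colander.SchemaNode(
        colander.Set(),
        widget=deform.widget.CheckboxChoiceWidget(
            values=[('repo-push', 'Repository push')]),
        description="Events activated for this integration",
        name='events'))

    # form POST data arrives as a list; colander's Set type turns it into a set
    data = schema.deserialize({'events': ['repo-push']})
    assert data['events'] == set(['repo-push'])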
289 | def send_event(self, event): |
|
294 | def send_event(self, event): | |
290 | log.debug('handling event %s with Webhook integration %s', |
|
295 | log.debug('handling event %s with Webhook integration %s', | |
291 | event.name, self) |
|
296 | event.name, self) | |
292 |
|
297 | |||
293 | if event.__class__ not in self.valid_events: |
|
298 | if event.__class__ not in self.valid_events: | |
294 | log.debug('event not valid: %r' % event) |
|
299 | log.debug('event not valid: %r' % event) | |
295 | return |
|
300 | return | |
296 |
|
301 | |||
297 | if event.name not in self.settings['events']: |
|
302 | if event.name not in self.settings['events']: | |
298 | log.debug('event ignored: %r' % event) |
|
303 | log.debug('event ignored: %r' % event) | |
299 | return |
|
304 | return | |
300 |
|
305 | |||
301 | data = event.as_dict() |
|
306 | data = event.as_dict() | |
302 | template_url = self.settings['url'] |
|
307 | template_url = self.settings['url'] | |
303 |
|
308 | |||
304 | headers = {} |
|
309 | headers = {} | |
305 | head_key = self.settings.get('custom_header_key') |
|
310 | head_key = self.settings.get('custom_header_key') | |
306 | head_val = self.settings.get('custom_header_val') |
|
311 | head_val = self.settings.get('custom_header_val') | |
307 | if head_key and head_val: |
|
312 | if head_key and head_val: | |
308 | headers = {head_key: head_val} |
|
313 | headers = {head_key: head_val} | |
309 |
|
314 | |||
310 | handler = WebhookHandler( |
|
315 | handler = WebhookHandler( | |
311 | template_url, self.settings['secret_token'], headers) |
|
316 | template_url, self.settings['secret_token'], headers) | |
312 |
|
317 | |||
313 | url_calls = handler(event, data) |
|
318 | url_calls = handler(event, data) | |
314 | log.debug('webhook: calling following urls: %s', |
|
319 | log.debug('webhook: calling following urls: %s', | |
315 | [x[0] for x in url_calls]) |
|
320 | [x[0] for x in url_calls]) | |
316 |
|
321 | |||
317 | run_task(post_to_webhook, url_calls, self.settings) |
|
322 | run_task(post_to_webhook, url_calls, self.settings) | |
318 |
|
323 | |||
319 |
|
324 | |||
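For orientation: post_to_webhook() below unpacks each entry of url_calls as (url, token, headers, data). The WebhookHandler output is not shown in this changeset, so the entry here is purely illustrative.

    # hypothetical url_calls entry, matching the unpacking in post_to_webhook()
    url_calls = [
        ('http://example.invalid/hook',   # target URL (hypothetical)
         'secret',                        # secret_token from settings
         {'X-Custom': 'value'},           # optional per-call headers
         {'name': 'repo-push'}),          # event payload dict
    ]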
320 | @async_task(ignore_result=True, base=RequestContextTask) |
|
325 | @async_task(ignore_result=True, base=RequestContextTask) | |
321 | def post_to_webhook(url_calls, settings): |
|
326 | def post_to_webhook(url_calls, settings): | |
322 | """ |
|
327 | """ | |
323 | Example data:: |
|
328 | Example data:: | |
324 |
|
329 | |||
325 | {'actor': {'user_id': 2, 'username': u'admin'}, |
|
330 | {'actor': {'user_id': 2, 'username': u'admin'}, | |
326 | 'actor_ip': u'192.168.157.1', |
|
331 | 'actor_ip': u'192.168.157.1', | |
327 | 'name': 'repo-push', |
|
332 | 'name': 'repo-push', | |
328 | 'push': {'branches': [{'name': u'default', |
|
333 | 'push': {'branches': [{'name': u'default', | |
329 | 'url': 'http://rc.local:8080/hg-repo/changelog?branch=default'}], |
|
334 | 'url': 'http://rc.local:8080/hg-repo/changelog?branch=default'}], | |
330 | 'commits': [{'author': u'Marcin Kuzminski <marcin@rhodecode.com>', |
|
335 | 'commits': [{'author': u'Marcin Kuzminski <marcin@rhodecode.com>', | |
331 | 'branch': u'default', |
|
336 | 'branch': u'default', | |
332 | 'date': datetime.datetime(2017, 11, 30, 12, 59, 48), |
|
337 | 'date': datetime.datetime(2017, 11, 30, 12, 59, 48), | |
333 | 'issues': [], |
|
338 | 'issues': [], | |
334 | 'mentions': [], |
|
339 | 'mentions': [], | |
335 | 'message': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
340 | 'message': u'commit Thu 30 Nov 2017 13:59:48 CET', | |
336 | 'message_html': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
341 | 'message_html': u'commit Thu 30 Nov 2017 13:59:48 CET', | |
337 | 'message_html_title': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
342 | 'message_html_title': u'commit Thu 30 Nov 2017 13:59:48 CET', | |
338 | 'parents': [{'raw_id': '431b772a5353dad9974b810dd3707d79e3a7f6e0'}], |
|
343 | 'parents': [{'raw_id': '431b772a5353dad9974b810dd3707d79e3a7f6e0'}], | |
339 | 'permalink_url': u'http://rc.local:8080/_7/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', |
|
344 | 'permalink_url': u'http://rc.local:8080/_7/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', | |
340 | 'raw_id': 'a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', |
|
345 | 'raw_id': 'a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', | |
341 | 'refs': {'bookmarks': [], 'branches': [u'default'], 'tags': [u'tip']}, |
|
346 | 'refs': {'bookmarks': [], 'branches': [u'default'], 'tags': [u'tip']}, | |
342 | 'reviewers': [], |
|
347 | 'reviewers': [], | |
343 | 'revision': 9L, |
|
348 | 'revision': 9L, | |
344 | 'short_id': 'a815cc738b96', |
|
349 | 'short_id': 'a815cc738b96', | |
345 | 'url': u'http://rc.local:8080/hg-repo/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf'}], |
|
350 | 'url': u'http://rc.local:8080/hg-repo/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf'}], | |
346 | 'issues': {}}, |
|
351 | 'issues': {}}, | |
347 | 'repo': {'extra_fields': '', |
|
352 | 'repo': {'extra_fields': '', | |
348 | 'permalink_url': u'http://rc.local:8080/_7', |
|
353 | 'permalink_url': u'http://rc.local:8080/_7', | |
349 | 'repo_id': 7, |
|
354 | 'repo_id': 7, | |
350 | 'repo_name': u'hg-repo', |
|
355 | 'repo_name': u'hg-repo', | |
351 | 'repo_type': u'hg', |
|
356 | 'repo_type': u'hg', | |
352 | 'url': u'http://rc.local:8080/hg-repo'}, |
|
357 | 'url': u'http://rc.local:8080/hg-repo'}, | |
353 | 'server_url': u'http://rc.local:8080', |
|
358 | 'server_url': u'http://rc.local:8080', | |
354 | 'utc_timestamp': datetime.datetime(2017, 11, 30, 13, 0, 1, 569276)} |
|
359 | 'utc_timestamp': datetime.datetime(2017, 11, 30, 13, 0, 1, 569276)} | |
355 |
|
360 | |||
356 | """ |
|
361 | """ | |
357 | max_retries = 3 |
|
362 | max_retries = 3 | |
358 | retries = Retry( |
|
363 | retries = Retry( | |
359 | total=max_retries, |
|
364 | total=max_retries, | |
360 | backoff_factor=0.15, |
|
365 | backoff_factor=0.15, | |
361 | status_forcelist=[500, 502, 503, 504]) |
|
366 | status_forcelist=[500, 502, 503, 504]) | |
362 | call_headers = { |
|
367 | call_headers = { | |
363 | 'User-Agent': 'RhodeCode-webhook-caller/{}'.format( |
|
368 | 'User-Agent': 'RhodeCode-webhook-caller/{}'.format( | |
364 | rhodecode.__version__) |
|
369 | rhodecode.__version__) | |
365 | } # updated below with custom ones, allows override |
|
370 | } # updated below with custom ones, allows override | |
366 |
|
371 | |||
367 | for url, token, headers, data in url_calls: |
|
372 | for url, token, headers, data in url_calls: | |
368 | req_session = requests.Session() |
|
373 | req_session = requests.Session() | |
369 | req_session.mount( # retry max N times |
|
374 | req_session.mount( # retry max N times | |
370 | 'http://', requests.adapters.HTTPAdapter(max_retries=retries)) |
|
375 | 'http://', requests.adapters.HTTPAdapter(max_retries=retries)) | |
371 |
|
376 | |||
372 | method = settings.get('method_type') or 'post' |
|
377 | method = settings.get('method_type') or 'post' | |
373 | call_method = getattr(req_session, method) |
|
378 | call_method = getattr(req_session, method) | |
374 |
|
379 | |||
375 | headers = headers or {} |
|
380 | headers = headers or {} | |
376 | call_headers.update(headers) |
|
381 | call_headers.update(headers) | |
377 | auth = get_auth(settings) |
|
382 | auth = get_auth(settings) | |
378 |
|
383 | |||
379 | log.debug('calling Webhook with method: %s, and auth:%s', |
|
384 | log.debug('calling Webhook with method: %s, and auth:%s', | |
380 | call_method, auth) |
|
385 | call_method, auth) | |
381 |
|
386 | if settings.get('log_data'): | ||
|
387 | log.debug('calling webhook with data: %s', data) | |||
382 | resp = call_method(url, json={ |
|
388 | resp = call_method(url, json={ | |
383 | 'token': token, |
|
389 | 'token': token, | |
384 | 'event': data |
|
390 | 'event': data | |
385 | }, headers=call_headers, auth=auth) |
|
391 | }, headers=call_headers, auth=auth) | |
386 | log.debug('Got Webhook response: %s', resp) |
|
392 | log.debug('Got Webhook response: %s', resp) | |
387 |
|
393 | |||
388 | resp.raise_for_status() # raise exception on a failed request |
|
394 | resp.raise_for_status() # raise exception on a failed request |
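The retry wiring above can be exercised on its own; this is a hedged, standalone sketch mirroring those settings (the URL is hypothetical, and on older requests versions Retry lives under requests.packages.urllib3.util.retry). Note that urllib3's default Retry policy applies status-based retries only to idempotent methods, so a POST may not be retried on 5xx responses.

    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    retries = Retry(total=3, backoff_factor=0.15,
                    status_forcelist=[500, 502, 503, 504])
    session = requests.Session()
    # mirror the code above: only plain http gets the retrying adapter
    session.mount('http://', HTTPAdapter(max_retries=retries))

    resp = session.post('http://example.invalid/hook',
                        json={'token': 'secret', 'event': {'name': 'repo-push'}})
    resp.raise_for_status()  # raise on a failed request, as above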
@@ -1,460 +1,459 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import deform |
|
21 | import deform | |
22 | import logging |
|
22 | import logging | |
23 | import peppercorn |
|
23 | import peppercorn | |
24 | import webhelpers.paginate |
|
24 | import webhelpers.paginate | |
25 |
|
25 | |||
26 | from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPNotFound |
|
26 | from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPNotFound | |
27 |
|
27 | |||
28 | from rhodecode.apps._base import BaseAppView |
|
28 | from rhodecode.apps._base import BaseAppView | |
29 | from rhodecode.integrations import integration_type_registry |
|
29 | from rhodecode.integrations import integration_type_registry | |
30 | from rhodecode.apps.admin.navigation import navigation_list |
|
30 | from rhodecode.apps.admin.navigation import navigation_list | |
31 | from rhodecode.lib.auth import ( |
|
31 | from rhodecode.lib.auth import ( | |
32 | LoginRequired, CSRFRequired, HasPermissionAnyDecorator, |
|
32 | LoginRequired, CSRFRequired, HasPermissionAnyDecorator, | |
33 | HasRepoPermissionAnyDecorator, HasRepoGroupPermissionAnyDecorator) |
|
33 | HasRepoPermissionAnyDecorator, HasRepoGroupPermissionAnyDecorator) | |
34 | from rhodecode.lib.utils2 import safe_int |
|
34 | from rhodecode.lib.utils2 import safe_int | |
35 | from rhodecode.lib import helpers as h |
|
35 | from rhodecode.lib import helpers as h | |
36 | from rhodecode.model.db import Repository, RepoGroup, Session, Integration |
|
36 | from rhodecode.model.db import Repository, RepoGroup, Session, Integration | |
37 | from rhodecode.model.scm import ScmModel |
|
37 | from rhodecode.model.scm import ScmModel | |
38 | from rhodecode.model.integration import IntegrationModel |
|
38 | from rhodecode.model.integration import IntegrationModel | |
39 | from rhodecode.model.validation_schema.schemas.integration_schema import ( |
|
39 | from rhodecode.model.validation_schema.schemas.integration_schema import ( | |
40 | make_integration_schema, IntegrationScopeType) |
|
40 | make_integration_schema, IntegrationScopeType) | |
41 |
|
41 | |||
42 | log = logging.getLogger(__name__) |
|
42 | log = logging.getLogger(__name__) | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | class IntegrationSettingsViewBase(BaseAppView): |
|
45 | class IntegrationSettingsViewBase(BaseAppView): | |
46 | """ |
|
46 | """ | |
47 | Base Integration settings view used by both repo / global settings |
|
47 | Base Integration settings view used by both repo / global settings | |
48 | """ |
|
48 | """ | |
49 |
|
49 | |||
50 | def __init__(self, context, request): |
|
50 | def __init__(self, context, request): | |
51 | super(IntegrationSettingsViewBase, self).__init__(context, request) |
|
51 | super(IntegrationSettingsViewBase, self).__init__(context, request) | |
52 | self._load_view_context() |
|
52 | self._load_view_context() | |
53 |
|
53 | |||
54 | def _load_view_context(self): |
|
54 | def _load_view_context(self): | |
55 | """ |
|
55 | """ | |
56 | This avoids boilerplate for repo/global+list/edit+views/templates |
|
56 | This avoids boilerplate for repo/global+list/edit+views/templates | |
57 | by doing all possible contexts at the same time; however, it should |
|
57 | by doing all possible contexts at the same time; however, it should | |
58 | be split up into separate functions once more "contexts" exist |
|
58 | be split up into separate functions once more "contexts" exist | |
59 | """ |
|
59 | """ | |
60 |
|
60 | |||
61 | self.IntegrationType = None |
|
61 | self.IntegrationType = None | |
62 | self.repo = None |
|
62 | self.repo = None | |
63 | self.repo_group = None |
|
63 | self.repo_group = None | |
64 | self.integration = None |
|
64 | self.integration = None | |
65 | self.integrations = {} |
|
65 | self.integrations = {} | |
66 |
|
66 | |||
67 | request = self.request |
|
67 | request = self.request | |
68 |
|
68 | |||
69 | if 'repo_name' in request.matchdict: # in repo settings context |
|
69 | if 'repo_name' in request.matchdict: # in repo settings context | |
70 | repo_name = request.matchdict['repo_name'] |
|
70 | repo_name = request.matchdict['repo_name'] | |
71 | self.repo = Repository.get_by_repo_name(repo_name) |
|
71 | self.repo = Repository.get_by_repo_name(repo_name) | |
72 |
|
72 | |||
73 | if 'repo_group_name' in request.matchdict: # in group settings context |
|
73 | if 'repo_group_name' in request.matchdict: # in group settings context | |
74 | repo_group_name = request.matchdict['repo_group_name'] |
|
74 | repo_group_name = request.matchdict['repo_group_name'] | |
75 | self.repo_group = RepoGroup.get_by_group_name(repo_group_name) |
|
75 | self.repo_group = RepoGroup.get_by_group_name(repo_group_name) | |
76 |
|
76 | |||
77 | if 'integration' in request.matchdict: # integration type context |
|
77 | if 'integration' in request.matchdict: # integration type context | |
78 | integration_type = request.matchdict['integration'] |
|
78 | integration_type = request.matchdict['integration'] | |
79 | if integration_type not in integration_type_registry: |
|
79 | if integration_type not in integration_type_registry: | |
80 | raise HTTPNotFound() |
|
80 | raise HTTPNotFound() | |
81 |
|
81 | |||
82 | self.IntegrationType = integration_type_registry[integration_type] |
|
82 | self.IntegrationType = integration_type_registry[integration_type] | |
83 | if self.IntegrationType.is_dummy: |
|
83 | if self.IntegrationType.is_dummy: | |
84 | raise HTTPNotFound() |
|
84 | raise HTTPNotFound() | |
85 |
|
85 | |||
86 | if 'integration_id' in request.matchdict: # single integration context |
|
86 | if 'integration_id' in request.matchdict: # single integration context | |
87 | integration_id = request.matchdict['integration_id'] |
|
87 | integration_id = request.matchdict['integration_id'] | |
88 | self.integration = Integration.get(integration_id) |
|
88 | self.integration = Integration.get(integration_id) | |
89 |
|
89 | |||
90 | # extra perms check just in case |
|
90 | # extra perms check just in case | |
91 | if not self._has_perms_for_integration(self.integration): |
|
91 | if not self._has_perms_for_integration(self.integration): | |
92 | raise HTTPForbidden() |
|
92 | raise HTTPForbidden() | |
93 |
|
93 | |||
94 | self.settings = self.integration and self.integration.settings or {} |
|
94 | self.settings = self.integration and self.integration.settings or {} | |
95 | self.admin_view = not (self.repo or self.repo_group) |
|
95 | self.admin_view = not (self.repo or self.repo_group) | |
96 |
|
96 | |||
97 | def _has_perms_for_integration(self, integration): |
|
97 | def _has_perms_for_integration(self, integration): | |
98 | perms = self.request.user.permissions |
|
98 | perms = self.request.user.permissions | |
99 |
|
99 | |||
100 | if 'hg.admin' in perms['global']: |
|
100 | if 'hg.admin' in perms['global']: | |
101 | return True |
|
101 | return True | |
102 |
|
102 | |||
103 | if integration.repo: |
|
103 | if integration.repo: | |
104 | return perms['repositories'].get( |
|
104 | return perms['repositories'].get( | |
105 | integration.repo.repo_name) == 'repository.admin' |
|
105 | integration.repo.repo_name) == 'repository.admin' | |
106 |
|
106 | |||
107 | if integration.repo_group: |
|
107 | if integration.repo_group: | |
108 | return perms['repositories_groups'].get( |
|
108 | return perms['repositories_groups'].get( | |
109 | integration.repo_group.group_name) == 'group.admin' |
|
109 | integration.repo_group.group_name) == 'group.admin' | |
110 |
|
110 | |||
111 | return False |
|
111 | return False | |
112 |
|
112 | |||
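_has_perms_for_integration() reads three keys off request.user.permissions; an illustrative (not exhaustive) shape of that structure, with example repo and group names:

    permissions = {
        'global': set(['hg.admin']),
        'repositories': {'hg-repo': 'repository.admin'},
        'repositories_groups': {'some-group': 'group.admin'},
    }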
113 | def _get_local_tmpl_context(self, include_app_defaults=True): |
|
113 | def _get_local_tmpl_context(self, include_app_defaults=True): | |
114 | _ = self.request.translate |
|
114 | _ = self.request.translate | |
115 | c = super(IntegrationSettingsViewBase, self)._get_local_tmpl_context( |
|
115 | c = super(IntegrationSettingsViewBase, self)._get_local_tmpl_context( | |
116 | include_app_defaults=include_app_defaults) |
|
116 | include_app_defaults=include_app_defaults) | |
117 |
|
117 | |||
118 | c.active = 'integrations' |
|
118 | c.active = 'integrations' | |
119 |
|
119 | |||
120 | return c |
|
120 | return c | |
121 |
|
121 | |||
122 | def _form_schema(self): |
|
122 | def _form_schema(self): | |
123 | schema = make_integration_schema(IntegrationType=self.IntegrationType, |
|
123 | schema = make_integration_schema(IntegrationType=self.IntegrationType, | |
124 | settings=self.settings) |
|
124 | settings=self.settings) | |
125 |
|
125 | |||
126 | # returns a clone, important if mutating the schema later |
|
126 | # returns a clone, important if mutating the schema later | |
127 | return schema.bind( |
|
127 | return schema.bind( | |
128 | permissions=self.request.user.permissions, |
|
128 | permissions=self.request.user.permissions, | |
129 | no_scope=not self.admin_view) |
|
129 | no_scope=not self.admin_view) | |
130 |
|
130 | |||
131 | def _form_defaults(self): |
|
131 | def _form_defaults(self): | |
132 | _ = self.request.translate |
|
132 | _ = self.request.translate | |
133 | defaults = {} |
|
133 | defaults = {} | |
134 |
|
134 | |||
135 | if self.integration: |
|
135 | if self.integration: | |
136 | defaults['settings'] = self.integration.settings or {} |
|
136 | defaults['settings'] = self.integration.settings or {} | |
137 | defaults['options'] = { |
|
137 | defaults['options'] = { | |
138 | 'name': self.integration.name, |
|
138 | 'name': self.integration.name, | |
139 | 'enabled': self.integration.enabled, |
|
139 | 'enabled': self.integration.enabled, | |
140 | 'scope': { |
|
140 | 'scope': { | |
141 | 'repo': self.integration.repo, |
|
141 | 'repo': self.integration.repo, | |
142 | 'repo_group': self.integration.repo_group, |
|
142 | 'repo_group': self.integration.repo_group, | |
143 | 'child_repos_only': self.integration.child_repos_only, |
|
143 | 'child_repos_only': self.integration.child_repos_only, | |
144 | }, |
|
144 | }, | |
145 | } |
|
145 | } | |
146 | else: |
|
146 | else: | |
147 | if self.repo: |
|
147 | if self.repo: | |
148 | scope = _('{repo_name} repository').format( |
|
148 | scope = _('{repo_name} repository').format( | |
149 | repo_name=self.repo.repo_name) |
|
149 | repo_name=self.repo.repo_name) | |
150 | elif self.repo_group: |
|
150 | elif self.repo_group: | |
151 | scope = _('{repo_group_name} repo group').format( |
|
151 | scope = _('{repo_group_name} repo group').format( | |
152 | repo_group_name=self.repo_group.group_name) |
|
152 | repo_group_name=self.repo_group.group_name) | |
153 | else: |
|
153 | else: | |
154 | scope = _('Global') |
|
154 | scope = _('Global') | |
155 |
|
155 | |||
156 | defaults['options'] = { |
|
156 | defaults['options'] = { | |
157 | 'enabled': True, |
|
157 | 'enabled': True, | |
158 | 'name': _('{name} integration').format( |
|
158 | 'name': _('{name} integration').format( | |
159 | name=self.IntegrationType.display_name), |
|
159 | name=self.IntegrationType.display_name), | |
160 | } |
|
160 | } | |
161 | defaults['options']['scope'] = { |
|
161 | defaults['options']['scope'] = { | |
162 | 'repo': self.repo, |
|
162 | 'repo': self.repo, | |
163 | 'repo_group': self.repo_group, |
|
163 | 'repo_group': self.repo_group, | |
164 | } |
|
164 | } | |
165 |
|
165 | |||
166 | return defaults |
|
166 | return defaults | |
167 |
|
167 | |||
168 | def _delete_integration(self, integration): |
|
168 | def _delete_integration(self, integration): | |
169 | _ = self.request.translate |
|
169 | _ = self.request.translate | |
170 | Session().delete(integration) |
|
170 | Session().delete(integration) | |
171 | Session().commit() |
|
171 | Session().commit() | |
172 | h.flash( |
|
172 | h.flash( | |
173 | _('Integration {integration_name} deleted successfully.').format( |
|
173 | _('Integration {integration_name} deleted successfully.').format( | |
174 | integration_name=integration.name), |
|
174 | integration_name=integration.name), | |
175 | category='success') |
|
175 | category='success') | |
176 |
|
176 | |||
177 | if self.repo: |
|
177 | if self.repo: | |
178 | redirect_to = self.request.route_path( |
|
178 | redirect_to = self.request.route_path( | |
179 | 'repo_integrations_home', repo_name=self.repo.repo_name) |
|
179 | 'repo_integrations_home', repo_name=self.repo.repo_name) | |
180 | elif self.repo_group: |
|
180 | elif self.repo_group: | |
181 | redirect_to = self.request.route_path( |
|
181 | redirect_to = self.request.route_path( | |
182 | 'repo_group_integrations_home', |
|
182 | 'repo_group_integrations_home', | |
183 | repo_group_name=self.repo_group.group_name) |
|
183 | repo_group_name=self.repo_group.group_name) | |
184 | else: |
|
184 | else: | |
185 | redirect_to = self.request.route_path('global_integrations_home') |
|
185 | redirect_to = self.request.route_path('global_integrations_home') | |
186 | raise HTTPFound(redirect_to) |
|
186 | raise HTTPFound(redirect_to) | |
187 |
|
187 | |||
188 | def _integration_list(self): |
|
188 | def _integration_list(self): | |
189 | """ List integrations """ |
|
189 | """ List integrations """ | |
190 |
|
190 | |||
191 | c = self.load_default_context() |
|
191 | c = self.load_default_context() | |
192 | if self.repo: |
|
192 | if self.repo: | |
193 | scope = self.repo |
|
193 | scope = self.repo | |
194 | elif self.repo_group: |
|
194 | elif self.repo_group: | |
195 | scope = self.repo_group |
|
195 | scope = self.repo_group | |
196 | else: |
|
196 | else: | |
197 | scope = 'all' |
|
197 | scope = 'all' | |
198 |
|
198 | |||
199 | integrations = [] |
|
199 | integrations = [] | |
200 |
|
200 | |||
201 | for IntType, integration in IntegrationModel().get_integrations( |
|
201 | for IntType, integration in IntegrationModel().get_integrations( | |
202 | scope=scope, IntegrationType=self.IntegrationType): |
|
202 | scope=scope, IntegrationType=self.IntegrationType): | |
203 |
|
203 | |||
204 | # extra permissions check *just in case* |
|
204 | # extra permissions check *just in case* | |
205 | if not self._has_perms_for_integration(integration): |
|
205 | if not self._has_perms_for_integration(integration): | |
206 | continue |
|
206 | continue | |
207 |
|
207 | |||
208 | integrations.append((IntType, integration)) |
|
208 | integrations.append((IntType, integration)) | |
209 |
|
209 | |||
210 | sort_arg = self.request.GET.get('sort', 'name:asc') |
|
210 | sort_arg = self.request.GET.get('sort', 'name:asc') | |
211 | sort_dir = 'asc' |
|
211 | sort_dir = 'asc' | |
212 | if ':' in sort_arg: |
|
212 | if ':' in sort_arg: | |
213 | sort_field, sort_dir = sort_arg.split(':') |
|
213 | sort_field, sort_dir = sort_arg.split(':') | |
214 | else: |
|
214 | else: | |
215 | sort_field, sort_dir = sort_arg, 'asc' |
|
215 | sort_field, sort_dir = sort_arg, 'asc' | |
216 |
|
216 | |||
217 | assert sort_field in ('name', 'integration_type', 'enabled', 'scope') |
|
217 | assert sort_field in ('name', 'integration_type', 'enabled', 'scope') | |
218 |
|
218 | |||
219 | integrations.sort( |
|
219 | integrations.sort( | |
220 | key=lambda x: getattr(x[1], sort_field), |
|
220 | key=lambda x: getattr(x[1], sort_field), | |
221 | reverse=(sort_dir == 'desc')) |
|
221 | reverse=(sort_dir == 'desc')) | |
222 |
|
222 | |||
223 | page_url = webhelpers.paginate.PageURL( |
|
223 | page_url = webhelpers.paginate.PageURL( | |
224 | self.request.path, self.request.GET) |
|
224 | self.request.path, self.request.GET) | |
225 | page = safe_int(self.request.GET.get('page', 1), 1) |
|
225 | page = safe_int(self.request.GET.get('page', 1), 1) | |
226 |
|
226 | |||
227 | integrations = h.Page( |
|
227 | integrations = h.Page( | |
228 | integrations, page=page, items_per_page=10, url=page_url) |
|
228 | integrations, page=page, items_per_page=10, url=page_url) | |
229 |
|
229 | |||
230 | c.rev_sort_dir = sort_dir != 'desc' and 'desc' or 'asc' |
|
230 | c.rev_sort_dir = sort_dir != 'desc' and 'desc' or 'asc' | |
231 |
|
231 | |||
232 | c.current_IntegrationType = self.IntegrationType |
|
232 | c.current_IntegrationType = self.IntegrationType | |
233 | c.integrations_list = integrations |
|
233 | c.integrations_list = integrations | |
234 | c.available_integrations = integration_type_registry |
|
234 | c.available_integrations = integration_type_registry | |
235 |
|
235 | |||
236 | return self._get_template_context(c) |
|
236 | return self._get_template_context(c) | |
237 |
|
237 | |||
238 | def _settings_get(self, defaults=None, form=None): |
|
238 | def _settings_get(self, defaults=None, form=None): | |
239 | """ |
|
239 | """ | |
240 | View that displays the integration settings as a form. |
|
240 | View that displays the integration settings as a form. | |
241 | """ |
|
241 | """ | |
242 | c = self.load_default_context() |
|
242 | c = self.load_default_context() | |
243 |
|
243 | |||
244 | defaults = defaults or self._form_defaults() |
|
244 | defaults = defaults or self._form_defaults() | |
245 | schema = self._form_schema() |
|
245 | schema = self._form_schema() | |
246 |
|
246 | |||
247 | if self.integration: |
|
247 | if self.integration: | |
248 | buttons = ('submit', 'delete') |
|
248 | buttons = ('submit', 'delete') | |
249 | else: |
|
249 | else: | |
250 | buttons = ('submit',) |
|
250 | buttons = ('submit',) | |
251 |
|
251 | |||
252 | form = form or deform.Form(schema, appstruct=defaults, buttons=buttons) |
|
252 | form = form or deform.Form(schema, appstruct=defaults, buttons=buttons) | |
253 |
|
253 | |||
254 | c.form = form |
|
254 | c.form = form | |
255 | c.current_IntegrationType = self.IntegrationType |
|
255 | c.current_IntegrationType = self.IntegrationType | |
256 | c.integration = self.integration |
|
256 | c.integration = self.integration | |
257 |
|
257 | |||
258 | return self._get_template_context(c) |
|
258 | return self._get_template_context(c) | |
259 |
|
259 | |||
260 | def _settings_post(self): |
|
260 | def _settings_post(self): | |
261 | """ |
|
261 | """ | |
262 | View that validates and stores the integration settings. |
|
262 | View that validates and stores the integration settings. | |
263 | """ |
|
263 | """ | |
264 | _ = self.request.translate |
|
264 | _ = self.request.translate | |
265 |
|
265 | |||
266 | controls = self.request.POST.items() |
|
266 | controls = self.request.POST.items() | |
267 | pstruct = peppercorn.parse(controls) |
|
267 | pstruct = peppercorn.parse(controls) | |
268 |
|
268 | |||
269 | if self.integration and pstruct.get('delete'): |
|
269 | if self.integration and pstruct.get('delete'): | |
270 | return self._delete_integration(self.integration) |
|
270 | return self._delete_integration(self.integration) | |
271 |
|
271 | |||
272 | schema = self._form_schema() |
|
272 | schema = self._form_schema() | |
273 |
|
273 | |||
274 | skip_settings_validation = False |
|
274 | skip_settings_validation = False | |
275 | if self.integration and 'enabled' not in pstruct.get('options', {}): |
|
275 | if self.integration and 'enabled' not in pstruct.get('options', {}): | |
276 | skip_settings_validation = True |
|
276 | skip_settings_validation = True | |
277 | schema['settings'].validator = None |
|
277 | schema['settings'].validator = None | |
278 | for field in schema['settings'].children: |
|
278 | for field in schema['settings'].children: | |
279 | field.validator = None |
|
279 | field.validator = None | |
280 | field.missing = '' |
|
280 | field.missing = '' | |
281 |
|
281 | |||
282 | if self.integration: |
|
282 | if self.integration: | |
283 | buttons = ('submit', 'delete') |
|
283 | buttons = ('submit', 'delete') | |
284 | else: |
|
284 | else: | |
285 | buttons = ('submit',) |
|
285 | buttons = ('submit',) | |
286 |
|
286 | |||
287 | form = deform.Form(schema, buttons=buttons) |
|
287 | form = deform.Form(schema, buttons=buttons) | |
288 |
|
288 | |||
289 | if not self.admin_view: |
|
289 | if not self.admin_view: | |
290 | # scope is a read-only field in these cases, and has to be added |
|
290 | # scope is a read-only field in these cases, and has to be added | |
291 | options = pstruct.setdefault('options', {}) |
|
291 | options = pstruct.setdefault('options', {}) | |
292 | if 'scope' not in options: |
|
292 | if 'scope' not in options: | |
293 | options['scope'] = IntegrationScopeType().serialize(None, { |
|
293 | options['scope'] = IntegrationScopeType().serialize(None, { | |
294 | 'repo': self.repo, |
|
294 | 'repo': self.repo, | |
295 | 'repo_group': self.repo_group, |
|
295 | 'repo_group': self.repo_group, | |
296 | }) |
|
296 | }) | |
297 |
|
297 | |||
298 | try: |
|
298 | try: | |
299 | valid_data = form.validate_pstruct(pstruct) |
|
299 | valid_data = form.validate_pstruct(pstruct) | |
300 | except deform.ValidationFailure as e: |
|
300 | except deform.ValidationFailure as e: | |
301 | h.flash( |
|
301 | h.flash( | |
302 | _('Errors exist when saving integration settings. ' |
|
302 | _('Errors exist when saving integration settings. ' | |
303 | 'Please check the form inputs.'), |
|
303 | 'Please check the form inputs.'), | |
304 | category='error') |
|
304 | category='error') | |
305 | return self._settings_get(form=e) |
|
305 | return self._settings_get(form=e) | |
306 |
|
306 | |||
307 | if not self.integration: |
|
307 | if not self.integration: | |
308 | self.integration = Integration() |
|
308 | self.integration = Integration() | |
309 | self.integration.integration_type = self.IntegrationType.key |
|
309 | self.integration.integration_type = self.IntegrationType.key | |
310 | Session().add(self.integration) |
|
310 | Session().add(self.integration) | |
311 |
|
311 | |||
312 | scope = valid_data['options']['scope'] |
|
312 | scope = valid_data['options']['scope'] | |
313 |
|
313 | |||
314 | IntegrationModel().update_integration(self.integration, |
|
314 | IntegrationModel().update_integration(self.integration, | |
315 | name=valid_data['options']['name'], |
|
315 | name=valid_data['options']['name'], | |
316 | enabled=valid_data['options']['enabled'], |
|
316 | enabled=valid_data['options']['enabled'], | |
317 | settings=valid_data['settings'], |
|
317 | settings=valid_data['settings'], | |
318 | repo=scope['repo'], |
|
318 | repo=scope['repo'], | |
319 | repo_group=scope['repo_group'], |
|
319 | repo_group=scope['repo_group'], | |
320 | child_repos_only=scope['child_repos_only'], |
|
320 | child_repos_only=scope['child_repos_only'], | |
321 | ) |
|
321 | ) | |
322 |
|
322 | |||
323 | self.integration.settings = valid_data['settings'] |
|
323 | self.integration.settings = valid_data['settings'] | |
324 | Session().commit() |
|
324 | Session().commit() | |
325 | # Display success message and redirect. |
|
325 | # Display success message and redirect. | |
326 | h.flash( |
|
326 | h.flash( | |
327 | _('Integration {integration_name} updated successfully.').format( |
|
327 | _('Integration {integration_name} updated successfully.').format( | |
328 | integration_name=self.IntegrationType.display_name), |
|
328 | integration_name=self.IntegrationType.display_name), | |
329 | category='success') |
|
329 | category='success') | |
330 |
|
330 | |||
331 | # if integration scope changes, we must redirect to the right place |
|
331 | # if integration scope changes, we must redirect to the right place | |
332 | # keeping in mind if the original view was for /repo/ or /_admin/ |
|
332 | # keeping in mind if the original view was for /repo/ or /_admin/ | |
333 | admin_view = not (self.repo or self.repo_group) |
|
333 | admin_view = not (self.repo or self.repo_group) | |
334 |
|
334 | |||
335 | if self.integration.repo and not admin_view: |
|
335 | if self.integration.repo and not admin_view: | |
336 | redirect_to = self.request.route_path( |
|
336 | redirect_to = self.request.route_path( | |
337 | 'repo_integrations_edit', |
|
337 | 'repo_integrations_edit', | |
338 | repo_name=self.integration.repo.repo_name, |
|
338 | repo_name=self.integration.repo.repo_name, | |
339 | integration=self.integration.integration_type, |
|
339 | integration=self.integration.integration_type, | |
340 | integration_id=self.integration.integration_id) |
|
340 | integration_id=self.integration.integration_id) | |
341 | elif self.integration.repo_group and not admin_view: |
|
341 | elif self.integration.repo_group and not admin_view: | |
342 | redirect_to = self.request.route_path( |
|
342 | redirect_to = self.request.route_path( | |
343 | 'repo_group_integrations_edit', |
|
343 | 'repo_group_integrations_edit', | |
344 | repo_group_name=self.integration.repo_group.group_name, |
|
344 | repo_group_name=self.integration.repo_group.group_name, | |
345 | integration=self.integration.integration_type, |
|
345 | integration=self.integration.integration_type, | |
346 | integration_id=self.integration.integration_id) |
|
346 | integration_id=self.integration.integration_id) | |
347 | else: |
|
347 | else: | |
348 | redirect_to = self.request.route_path( |
|
348 | redirect_to = self.request.route_path( | |
349 | 'global_integrations_edit', |
|
349 | 'global_integrations_edit', | |
350 | integration=self.integration.integration_type, |
|
350 | integration=self.integration.integration_type, | |
351 | integration_id=self.integration.integration_id) |
|
351 | integration_id=self.integration.integration_id) | |
352 |
|
352 | |||
353 | return HTTPFound(redirect_to) |
|
353 | return HTTPFound(redirect_to) | |
354 |
|
354 | |||
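_settings_post() consumes the peppercorn-parsed POST body; a hypothetical pstruct for a webhook integration saved at repository scope might look like this (field names follow the schema, values are examples only):

    pstruct = {
        'options': {
            'name': 'IntegrationWebhookTest',
            'enabled': 'true',          # checkbox value, coerced by colander.Bool
            'scope': 'repo:hg-repo',    # serialized IntegrationScopeType value
        },
        'settings': {
            'url': 'http://example.invalid/hook',
            'secret_token': 'secret',
            'method_type': 'post',
        },
    }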
355 | def _new_integration(self): |
|
355 | def _new_integration(self): | |
356 | c = self.load_default_context() |
|
356 | c = self.load_default_context() | |
357 | c.available_integrations = integration_type_registry |
|
357 | c.available_integrations = integration_type_registry | |
358 | return self._get_template_context(c) |
|
358 | return self._get_template_context(c) | |
359 |
|
359 | |||
360 | def load_default_context(self): |
|
360 | def load_default_context(self): | |
361 | raise NotImplementedError() |
|
361 | raise NotImplementedError() | |
362 |
|
362 | |||
363 |
|
363 | |||
364 | class GlobalIntegrationsView(IntegrationSettingsViewBase): |
|
364 | class GlobalIntegrationsView(IntegrationSettingsViewBase): | |
365 | def load_default_context(self): |
|
365 | def load_default_context(self): | |
366 | c = self._get_local_tmpl_context() |
|
366 | c = self._get_local_tmpl_context() | |
367 | c.repo = self.repo |
|
367 | c.repo = self.repo | |
368 | c.repo_group = self.repo_group |
|
368 | c.repo_group = self.repo_group | |
369 | c.navlist = navigation_list(self.request) |
|
369 | c.navlist = navigation_list(self.request) | |
370 |
|
370 | |||
371 | return c |
|
371 | return c | |
372 |
|
372 | |||
373 | @LoginRequired() |
|
373 | @LoginRequired() | |
374 | @HasPermissionAnyDecorator('hg.admin') |
|
374 | @HasPermissionAnyDecorator('hg.admin') | |
375 | def integration_list(self): |
|
375 | def integration_list(self): | |
376 | return self._integration_list() |
|
376 | return self._integration_list() | |
377 |
|
377 | |||
378 | @LoginRequired() |
|
378 | @LoginRequired() | |
379 | @HasPermissionAnyDecorator('hg.admin') |
|
379 | @HasPermissionAnyDecorator('hg.admin') | |
380 | def settings_get(self): |
|
380 | def settings_get(self): | |
381 | return self._settings_get() |
|
381 | return self._settings_get() | |
382 |
|
382 | |||
383 | @LoginRequired() |
|
383 | @LoginRequired() | |
384 | @HasPermissionAnyDecorator('hg.admin') |
|
384 | @HasPermissionAnyDecorator('hg.admin') | |
385 | @CSRFRequired() |
|
385 | @CSRFRequired() | |
386 | def settings_post(self): |
|
386 | def settings_post(self): | |
387 | return self._settings_post() |
|
387 | return self._settings_post() | |
388 |
|
388 | |||
389 | @LoginRequired() |
|
389 | @LoginRequired() | |
390 | @HasPermissionAnyDecorator('hg.admin') |
|
390 | @HasPermissionAnyDecorator('hg.admin') | |
391 | def new_integration(self): |
|
391 | def new_integration(self): | |
392 | return self._new_integration() |
|
392 | return self._new_integration() | |
393 |
|
393 | |||
394 |
|
394 | |||
395 | class RepoIntegrationsView(IntegrationSettingsViewBase): |
|
395 | class RepoIntegrationsView(IntegrationSettingsViewBase): | |
396 | def load_default_context(self): |
|
396 | def load_default_context(self): | |
397 | c = self._get_local_tmpl_context() |
|
397 | c = self._get_local_tmpl_context() | |
398 |
|
398 | |||
399 | c.repo = self.repo |
|
399 | c.repo = self.repo | |
400 | c.repo_group = self.repo_group |
|
400 | c.repo_group = self.repo_group | |
401 |
|
401 | |||
402 | self.db_repo = self.repo |
|
402 | self.db_repo = self.repo | |
403 | c.rhodecode_db_repo = self.repo |
|
403 | c.rhodecode_db_repo = self.repo | |
404 | c.repo_name = self.db_repo.repo_name |
|
404 | c.repo_name = self.db_repo.repo_name | |
405 | c.repository_pull_requests = ScmModel().get_pull_requests(self.repo) |
|
405 | c.repository_pull_requests = ScmModel().get_pull_requests(self.repo) | |
406 |
|
406 | |||
407 |
|
||||
408 | return c |
|
407 | return c | |
409 |
|
408 | |||
410 | @LoginRequired() |
|
409 | @LoginRequired() | |
411 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
410 | @HasRepoPermissionAnyDecorator('repository.admin') | |
412 | def integration_list(self): |
|
411 | def integration_list(self): | |
413 | return self._integration_list() |
|
412 | return self._integration_list() | |
414 |
|
413 | |||
415 | @LoginRequired() |
|
414 | @LoginRequired() | |
416 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
415 | @HasRepoPermissionAnyDecorator('repository.admin') | |
417 | def settings_get(self): |
|
416 | def settings_get(self): | |
418 | return self._settings_get() |
|
417 | return self._settings_get() | |
419 |
|
418 | |||
420 | @LoginRequired() |
|
419 | @LoginRequired() | |
421 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
420 | @HasRepoPermissionAnyDecorator('repository.admin') | |
422 | @CSRFRequired() |
|
421 | @CSRFRequired() | |
423 | def settings_post(self): |
|
422 | def settings_post(self): | |
424 | return self._settings_post() |
|
423 | return self._settings_post() | |
425 |
|
424 | |||
426 | @LoginRequired() |
|
425 | @LoginRequired() | |
427 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
426 | @HasRepoPermissionAnyDecorator('repository.admin') | |
428 | def new_integration(self): |
|
427 | def new_integration(self): | |
429 | return self._new_integration() |
|
428 | return self._new_integration() | |
430 |
|
429 | |||
431 |
|
430 | |||
432 | class RepoGroupIntegrationsView(IntegrationSettingsViewBase): |
|
431 | class RepoGroupIntegrationsView(IntegrationSettingsViewBase): | |
433 | def load_default_context(self): |
|
432 | def load_default_context(self): | |
434 | c = self._get_local_tmpl_context() |
|
433 | c = self._get_local_tmpl_context() | |
435 | c.repo = self.repo |
|
434 | c.repo = self.repo | |
436 | c.repo_group = self.repo_group |
|
435 | c.repo_group = self.repo_group | |
437 | c.navlist = navigation_list(self.request) |
|
436 | c.navlist = navigation_list(self.request) | |
438 |
|
437 | |||
439 | return c |
|
438 | return c | |
440 |
|
439 | |||
441 | @LoginRequired() |
|
440 | @LoginRequired() | |
442 | @HasRepoGroupPermissionAnyDecorator('group.admin') |
|
441 | @HasRepoGroupPermissionAnyDecorator('group.admin') | |
443 | def integration_list(self): |
|
442 | def integration_list(self): | |
444 | return self._integration_list() |
|
443 | return self._integration_list() | |
445 |
|
444 | |||
446 | @LoginRequired() |
|
445 | @LoginRequired() | |
447 | @HasRepoGroupPermissionAnyDecorator('group.admin') |
|
446 | @HasRepoGroupPermissionAnyDecorator('group.admin') | |
448 | def settings_get(self): |
|
447 | def settings_get(self): | |
449 | return self._settings_get() |
|
448 | return self._settings_get() | |
450 |
|
449 | |||
451 | @LoginRequired() |
|
450 | @LoginRequired() | |
452 | @HasRepoGroupPermissionAnyDecorator('group.admin') |
|
451 | @HasRepoGroupPermissionAnyDecorator('group.admin') | |
453 | @CSRFRequired() |
|
452 | @CSRFRequired() | |
454 | def settings_post(self): |
|
453 | def settings_post(self): | |
455 | return self._settings_post() |
|
454 | return self._settings_post() | |
456 |
|
455 | |||
457 | @LoginRequired() |
|
456 | @LoginRequired() | |
458 | @HasRepoGroupPermissionAnyDecorator('group.admin') |
|
457 | @HasRepoGroupPermissionAnyDecorator('group.admin') | |
459 | def new_integration(self): |
|
458 | def new_integration(self): | |
460 | return self._new_integration() |
|
459 | return self._new_integration() |
@@ -1,226 +1,225 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import os |
|
21 | import os | |
22 |
|
22 | |||
23 | import deform |
|
23 | import deform | |
24 | import colander |
|
24 | import colander | |
25 |
|
25 | |||
26 | from rhodecode.translation import _ |
|
26 | from rhodecode.translation import _ | |
27 | from rhodecode.model.db import Repository, RepoGroup |
|
27 | from rhodecode.model.db import Repository, RepoGroup | |
28 | from rhodecode.model.validation_schema import validators, preparers |
|
28 | from rhodecode.model.validation_schema import validators, preparers | |
29 |
|
29 | |||
30 |
|
30 | |||
31 | def integration_scope_choices(permissions): |
|
31 | def integration_scope_choices(permissions): | |
32 | """ |
|
32 | """ | |
33 | Return list of (value, label) choices for integration scopes depending on |
|
33 | Return list of (value, label) choices for integration scopes depending on | |
34 | the permissions |
|
34 | the permissions | |
35 | """ |
|
35 | """ | |
36 | result = [('', _('Pick a scope:'))] |
|
36 | result = [('', _('Pick a scope:'))] | |
37 | if 'hg.admin' in permissions['global']: |
|
37 | if 'hg.admin' in permissions['global']: | |
38 | result.extend([ |
|
38 | result.extend([ | |
39 | ('global', _('Global (all repositories)')), |
|
39 | ('global', _('Global (all repositories)')), | |
40 | ('root-repos', _('Top level repositories only')), |
|
40 | ('root-repos', _('Top level repositories only')), | |
41 | ]) |
|
41 | ]) | |
42 |
|
42 | |||
43 | repo_choices = [ |
|
43 | repo_choices = [ | |
44 | ('repo:%s' % repo_name, '/' + repo_name) |
|
44 | ('repo:%s' % repo_name, '/' + repo_name) | |
45 | for repo_name, repo_perm |
|
45 | for repo_name, repo_perm | |
46 | in permissions['repositories'].items() |
|
46 | in permissions['repositories'].items() | |
47 | if repo_perm == 'repository.admin' |
|
47 | if repo_perm == 'repository.admin' | |
48 | ] |
|
48 | ] | |
49 | repogroup_choices = [ |
|
49 | repogroup_choices = [ | |
50 | ('repogroup:%s' % repo_group_name, '/' + repo_group_name + '/ (child repos only)') |
|
50 | ('repogroup:%s' % repo_group_name, '/' + repo_group_name + '/ (child repos only)') | |
51 | for repo_group_name, repo_group_perm |
|
51 | for repo_group_name, repo_group_perm | |
52 | in permissions['repositories_groups'].items() |
|
52 | in permissions['repositories_groups'].items() | |
53 | if repo_group_perm == 'group.admin' |
|
53 | if repo_group_perm == 'group.admin' | |
54 | ] |
|
54 | ] | |
55 | repogroup_recursive_choices = [ |
|
55 | repogroup_recursive_choices = [ | |
56 | ('repogroup-recursive:%s' % repo_group_name, '/' + repo_group_name + '/ (recursive)') |
|
56 | ('repogroup-recursive:%s' % repo_group_name, '/' + repo_group_name + '/ (recursive)') | |
57 | for repo_group_name, repo_group_perm |
|
57 | for repo_group_name, repo_group_perm | |
58 | in permissions['repositories_groups'].items() |
|
58 | in permissions['repositories_groups'].items() | |
59 | if repo_group_perm == 'group.admin' |
|
59 | if repo_group_perm == 'group.admin' | |
60 | ] |
|
60 | ] | |
61 | result.extend( |
|
61 | result.extend( | |
62 | sorted(repogroup_recursive_choices + repogroup_choices + repo_choices, |
|
62 | sorted(repogroup_recursive_choices + repogroup_choices + repo_choices, | |
63 | key=lambda (choice, label): choice.split(':', 1)[1] |
|
63 | key=lambda (choice, label): choice.split(':', 1)[1] | |
64 | ) |
|
64 | ) | |
65 | ) |
|
65 | ) | |
66 | return result |
|
66 | return result | |
67 |
|
67 | |||
68 |
|
68 | |||
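integration_scope_choices() returns flat (value, label) pairs; for a super-admin with one repository and one repo group (names hypothetical), the result would look roughly like:

    [('', 'Pick a scope:'),
     ('global', 'Global (all repositories)'),
     ('root-repos', 'Top level repositories only'),
     ('repo:hg-repo', '/hg-repo'),
     ('repogroup-recursive:some-group', '/some-group/ (recursive)'),
     ('repogroup:some-group', '/some-group/ (child repos only)')]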
69 | @colander.deferred |
|
69 | @colander.deferred | |
70 | def deferred_integration_scopes_validator(node, kw): |
|
70 | def deferred_integration_scopes_validator(node, kw): | |
71 | perms = kw.get('permissions') |
|
71 | perms = kw.get('permissions') | |
72 | def _scope_validator(_node, scope): |
|
72 | def _scope_validator(_node, scope): | |
73 | is_super_admin = 'hg.admin' in perms['global'] |
|
73 | is_super_admin = 'hg.admin' in perms['global'] | |
74 |
|
74 | |||
75 | if scope.get('repo'): |
|
75 | if scope.get('repo'): | |
76 | if (is_super_admin or perms['repositories'].get( |
|
76 | if (is_super_admin or perms['repositories'].get( | |
77 | scope['repo'].repo_name) == 'repository.admin'): |
|
77 | scope['repo'].repo_name) == 'repository.admin'): | |
78 | return True |
|
78 | return True | |
79 | msg = _('Only repo admins can create integrations') |
|
79 | msg = _('Only repo admins can create integrations') | |
80 | raise colander.Invalid(_node, msg) |
|
80 | raise colander.Invalid(_node, msg) | |
81 | elif scope.get('repo_group'): |
|
81 | elif scope.get('repo_group'): | |
82 | if (is_super_admin or perms['repositories_groups'].get( |
|
82 | if (is_super_admin or perms['repositories_groups'].get( | |
83 | scope['repo_group'].group_name) == 'group.admin'): |
|
83 | scope['repo_group'].group_name) == 'group.admin'): | |
84 | return True |
|
84 | return True | |
85 |
|
85 | |||
86 | msg = _('Only repogroup admins can create integrations') |
|
86 | msg = _('Only repogroup admins can create integrations') | |
87 | raise colander.Invalid(_node, msg) |
|
87 | raise colander.Invalid(_node, msg) | |
88 | else: |
|
88 | else: | |
89 | if is_super_admin: |
|
89 | if is_super_admin: | |
90 | return True |
|
90 | return True | |
91 | msg = _('Only superadmins can create global integrations') |
|
91 | msg = _('Only superadmins can create global integrations') | |
92 | raise colander.Invalid(_node, msg) |
|
92 | raise colander.Invalid(_node, msg) | |
93 |
|
93 | |||
94 | return _scope_validator |
|
94 | return _scope_validator | |
95 |
|
95 | |||
96 |
|
96 | |||
97 | @colander.deferred |
|
97 | @colander.deferred | |
98 | def deferred_integration_scopes_widget(node, kw): |
|
98 | def deferred_integration_scopes_widget(node, kw): | |
99 | if kw.get('no_scope'): |
|
99 | if kw.get('no_scope'): | |
100 | return deform.widget.TextInputWidget(readonly=True) |
|
100 | return deform.widget.TextInputWidget(readonly=True) | |
101 |
|
101 | |||
102 | choices = integration_scope_choices(kw.get('permissions')) |
|
102 | choices = integration_scope_choices(kw.get('permissions')) | |
103 | widget = deform.widget.Select2Widget(values=choices) |
|
103 | widget = deform.widget.Select2Widget(values=choices) | |
104 | return widget |
|
104 | return widget | |
105 |
|
105 | |||
106 |
|
106 | |||
107 | class IntegrationScopeType(colander.SchemaType): |
|
107 | class IntegrationScopeType(colander.SchemaType): | |
108 | def serialize(self, node, appstruct): |
|
108 | def serialize(self, node, appstruct): | |
109 | if appstruct is colander.null: |
|
109 | if appstruct is colander.null: | |
110 | return colander.null |
|
110 | return colander.null | |
111 |
|
111 | |||
112 | if appstruct.get('repo'): |
|
112 | if appstruct.get('repo'): | |
113 | return 'repo:%s' % appstruct['repo'].repo_name |
|
113 | return 'repo:%s' % appstruct['repo'].repo_name | |
114 | elif appstruct.get('repo_group'): |
|
114 | elif appstruct.get('repo_group'): | |
115 | if appstruct.get('child_repos_only'): |
|
115 | if appstruct.get('child_repos_only'): | |
116 | return 'repogroup:%s' % appstruct['repo_group'].group_name |
|
116 | return 'repogroup:%s' % appstruct['repo_group'].group_name | |
117 | else: |
|
117 | else: | |
118 | return 'repogroup-recursive:%s' % ( |
|
118 | return 'repogroup-recursive:%s' % ( | |
119 | appstruct['repo_group'].group_name) |
|
119 | appstruct['repo_group'].group_name) | |
120 | else: |
|
120 | else: | |
121 | if appstruct.get('child_repos_only'): |
|
121 | if appstruct.get('child_repos_only'): | |
122 | return 'root-repos' |
|
122 | return 'root-repos' | |
123 | else: |
|
123 | else: | |
124 | return 'global' |
|
124 | return 'global' | |
125 |
|
125 | |||
126 | raise colander.Invalid(node, '%r is not a valid scope' % appstruct) |
|
126 | raise colander.Invalid(node, '%r is not a valid scope' % appstruct) | |
127 |
|
127 | |||
128 | def deserialize(self, node, cstruct): |
|
128 | def deserialize(self, node, cstruct): | |
129 | if cstruct is colander.null: |
|
129 | if cstruct is colander.null: | |
130 | return colander.null |
|
130 | return colander.null | |
131 |
|
131 | |||
132 | if cstruct.startswith('repo:'): |
|
132 | if cstruct.startswith('repo:'): | |
133 | repo = Repository.get_by_repo_name(cstruct.split(':')[1]) |
|
133 | repo = Repository.get_by_repo_name(cstruct.split(':')[1]) | |
134 | if repo: |
|
134 | if repo: | |
135 | return { |
|
135 | return { | |
136 | 'repo': repo, |
|
136 | 'repo': repo, | |
137 | 'repo_group': None, |
|
137 | 'repo_group': None, | |
138 | 'child_repos_only': False, |
|
138 | 'child_repos_only': False, | |
139 | } |
|
139 | } | |
140 | elif cstruct.startswith('repogroup-recursive:'): |
|
140 | elif cstruct.startswith('repogroup-recursive:'): | |
141 | repo_group = RepoGroup.get_by_group_name(cstruct.split(':')[1]) |
|
141 | repo_group = RepoGroup.get_by_group_name(cstruct.split(':')[1]) | |
142 | if repo_group: |
|
142 | if repo_group: | |
143 | return { |
|
143 | return { | |
144 | 'repo': None, |
|
144 | 'repo': None, | |
145 | 'repo_group': repo_group, |
|
145 | 'repo_group': repo_group, | |
146 | 'child_repos_only': False |
|
146 | 'child_repos_only': False | |
147 | } |
|
147 | } | |
148 | elif cstruct.startswith('repogroup:'): |
|
148 | elif cstruct.startswith('repogroup:'): | |
149 | repo_group = RepoGroup.get_by_group_name(cstruct.split(':')[1]) |
|
149 | repo_group = RepoGroup.get_by_group_name(cstruct.split(':')[1]) | |
150 | if repo_group: |
|
150 | if repo_group: | |
151 | return { |
|
151 | return { | |
152 | 'repo': None, |
|
152 | 'repo': None, | |
153 | 'repo_group': repo_group, |
|
153 | 'repo_group': repo_group, | |
154 | 'child_repos_only': True |
|
154 | 'child_repos_only': True | |
155 | } |
|
155 | } | |
156 | elif cstruct == 'global': |
|
156 | elif cstruct == 'global': | |
157 | return { |
|
157 | return { | |
158 | 'repo': None, |
|
158 | 'repo': None, | |
159 | 'repo_group': None, |
|
159 | 'repo_group': None, | |
160 | 'child_repos_only': False |
|
160 | 'child_repos_only': False | |
161 | } |
|
161 | } | |
162 | elif cstruct == 'root-repos': |
|
162 | elif cstruct == 'root-repos': | |
163 | return { |
|
163 | return { | |
164 | 'repo': None, |
|
164 | 'repo': None, | |
165 | 'repo_group': None, |
|
165 | 'repo_group': None, | |
166 | 'child_repos_only': True |
|
166 | 'child_repos_only': True | |
167 | } |
|
167 | } | |
168 |
|
168 | |||
169 | raise colander.Invalid(node, '%r is not a valid scope' % cstruct) |
|
169 | raise colander.Invalid(node, '%r is not a valid scope' % cstruct) | |
170 |
|
170 | |||
171 |
|
171 | |||
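A quick round-trip check for IntegrationScopeType, covering only the global case defined above:

    scope_type = IntegrationScopeType()
    appstruct = {'repo': None, 'repo_group': None, 'child_repos_only': False}
    assert scope_type.serialize(None, appstruct) == 'global'
    assert scope_type.deserialize(None, 'global') == appstruct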
172 | class IntegrationOptionsSchemaBase(colander.MappingSchema): |
|
172 | class IntegrationOptionsSchemaBase(colander.MappingSchema): | |
173 |
|
173 | |||
174 | name = colander.SchemaNode( |
|
174 | name = colander.SchemaNode( | |
175 | colander.String(), |
|
175 | colander.String(), | |
176 | description=_('Short name for this integration.'), |
|
176 | description=_('Short name for this integration.'), | |
177 | missing=colander.required, |
|
177 | missing=colander.required, | |
178 | title=_('Integration name'), |
|
178 | title=_('Integration name'), | |
179 | ) |
|
179 | ) | |
180 |
|
180 | |||
181 | scope = colander.SchemaNode( |
|
181 | scope = colander.SchemaNode( | |
182 | IntegrationScopeType(), |
|
182 | IntegrationScopeType(), | |
183 | description=_( |
|
183 | description=_( | |
184 | 'Scope of the integration. Recursive means the integration ' |
|
184 | 'Scope of the integration. Recursive means the integration ' | |
185 | ' runs on all repos of that group and children recursively.'), |
|
185 | ' runs on all repos of that group and children recursively.'), | |
186 | title=_('Integration scope'), |
|
186 | title=_('Integration scope'), | |
187 | validator=deferred_integration_scopes_validator, |
|
187 | validator=deferred_integration_scopes_validator, | |
188 | widget=deferred_integration_scopes_widget, |
|
188 | widget=deferred_integration_scopes_widget, | |
189 | missing=colander.required, |
|
189 | missing=colander.required, | |
190 | ) |
|
190 | ) | |
191 |
|
191 | |||
192 | enabled = colander.SchemaNode( |
|
192 | enabled = colander.SchemaNode( | |
193 | colander.Bool(), |
|
193 | colander.Bool(), | |
194 | default=True, |
|
194 | default=True, | |
195 | description=_('Enable or disable this integration.'), |
|
195 | description=_('Enable or disable this integration.'), | |
196 | missing=False, |
|
196 | missing=False, | |
197 | title=_('Enabled'), |
|
197 | title=_('Enabled'), | |
198 | ) |
|
198 | ) | |
199 |
|
199 | |||
200 |
|
200 | |||
201 |
|
||||
202 | def make_integration_schema(IntegrationType, settings=None): |
|
201 | def make_integration_schema(IntegrationType, settings=None): | |
203 | """ |
|
202 | """ | |
204 | Return a colander schema for an integration type |
|
203 | Return a colander schema for an integration type | |
205 |
|
204 | |||
206 | :param IntegrationType: the integration type class |
|
205 | :param IntegrationType: the integration type class | |
207 | :param settings: existing integration settings dict (optional) |
|
206 | :param settings: existing integration settings dict (optional) | |
208 | """ |
|
207 | """ | |
209 |
|
208 | |||
210 | settings = settings or {} |
|
209 | settings = settings or {} | |
211 | settings_schema = IntegrationType(settings=settings).settings_schema() |
|
210 | settings_schema = IntegrationType(settings=settings).settings_schema() | |
212 |
|
211 | |||
213 | class IntegrationSchema(colander.Schema): |
|
212 | class IntegrationSchema(colander.Schema): | |
214 | options = IntegrationOptionsSchemaBase() |
|
213 | options = IntegrationOptionsSchemaBase() | |
215 |
|
214 | |||
216 | schema = IntegrationSchema() |
|
215 | schema = IntegrationSchema() | |
217 | schema['options'].title = _('General integration options') |
|
216 | schema['options'].title = _('General integration options') | |
218 |
|
217 | |||
219 | settings_schema.name = 'settings' |
|
218 | settings_schema.name = 'settings' | |
220 | settings_schema.title = _('{integration_type} settings').format( |
|
219 | settings_schema.title = _('{integration_type} settings').format( | |
221 | integration_type=IntegrationType.display_name) |
|
220 | integration_type=IntegrationType.display_name) | |
222 | schema.add(settings_schema) |
|
221 | schema.add(settings_schema) | |
223 |
|
222 | |||
224 | return schema |
|
223 | return schema | |
225 |
|
224 | |||
226 |
|
225 |
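Sketch only, not part of the changeset: a short illustration of how the scope deserializer and make_integration_schema() defined above fit together. The WebhookIntegrationType import is taken from the test code in this review; treat the exact module paths and keyword usage as assumptions.

    from rhodecode.integrations.types.webhook import WebhookIntegrationType

    # build a combined schema (general options + type-specific settings)
    schema = make_integration_schema(IntegrationType=WebhookIntegrationType)

    # scope strings accepted by the deserializer above (DB lookups elided):
    #   'repo:<repo_name>'             -> repo set,       child_repos_only=False
    #   'repogroup-recursive:<group>'  -> repo_group set, child_repos_only=False
    #   'repogroup:<group>'            -> repo_group set, child_repos_only=True
    #   'global'                       -> all None,       child_repos_only=False
    #   'root-repos'                   -> all None,       child_repos_only=True
    #   anything else                  -> colander.Invalid is raised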
@@ -1,53 +1,54 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | import pytest |
|
22 | import pytest | |
23 | from rhodecode import events |
|
23 | from rhodecode import events | |
|
24 | from rhodecode.lib.utils2 import AttributeDict | |||
24 |
|
25 | |||
25 |
|
26 | |||
26 | @pytest.fixture |
|
27 | @pytest.fixture | |
27 | def repo_push_event(backend, user_regular): |
|
28 | def repo_push_event(backend, user_regular): | |
28 | commits = [ |
|
29 | commits = [ | |
29 | {'message': 'ancestor commit fixes #15'}, |
|
30 | {'message': 'ancestor commit fixes #15'}, | |
30 | {'message': 'quick fixes'}, |
|
31 | {'message': 'quick fixes'}, | |
31 | {'message': 'change that fixes #41, #2'}, |
|
32 | {'message': 'change that fixes #41, #2'}, | |
32 | {'message': 'this is because 5b23c3532 broke stuff'}, |
|
33 | {'message': 'this is because 5b23c3532 broke stuff'}, | |
33 | {'message': 'last commit'}, |
|
34 | {'message': 'last commit'}, | |
34 | ] |
|
35 | ] | |
35 | commit_ids = backend.create_master_repo(commits).values() |
|
36 | commit_ids = backend.create_master_repo(commits).values() | |
36 | repo = backend.create_repo() |
|
37 | repo = backend.create_repo() | |
37 | scm_extras = { |
|
38 | scm_extras = AttributeDict({ | |
38 | 'ip': '127.0.0.1', |
|
39 | 'ip': '127.0.0.1', | |
39 | 'username': user_regular.username, |
|
40 | 'username': user_regular.username, | |
40 | 'user_id': user_regular.user_id, |
|
41 | 'user_id': user_regular.user_id, | |
41 | 'action': '', |
|
42 | 'action': '', | |
42 | 'repository': repo.repo_name, |
|
43 | 'repository': repo.repo_name, | |
43 | 'scm': repo.scm_instance().alias, |
|
44 | 'scm': repo.scm_instance().alias, | |
44 | 'config': '', |
|
45 | 'config': '', | |
45 | 'server_url': 'http://example.com', |
|
46 | 'server_url': 'http://example.com', | |
46 | 'make_lock': None, |
|
47 | 'make_lock': None, | |
47 | 'locked_by': [None], |
|
48 | 'locked_by': [None], | |
48 | 'commit_ids': commit_ids, |
|
49 | 'commit_ids': commit_ids, | |
49 | } |
|
50 | }) | |
50 |
|
51 | |||
51 | return events.RepoPushEvent(repo_name=repo.repo_name, |
|
52 | return events.RepoPushEvent(repo_name=repo.repo_name, | |
52 | pushed_commit_ids=commit_ids, |
|
53 | pushed_commit_ids=commit_ids, | |
53 | extras=scm_extras) |
|
54 | extras=scm_extras) |
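The only functional change in this file is wrapping the push extras in AttributeDict. A minimal sketch of why that matters, assuming AttributeDict from rhodecode.lib.utils2 simply exposes dict keys as attributes (which is how it is used here):

    from rhodecode.lib.utils2 import AttributeDict

    # event handlers can now read extras.username instead of extras['username']
    extras = AttributeDict({'username': 'test_regular', 'ip': '127.0.0.1'})
    assert extras['username'] == extras.username  # both access styles work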
@@ -1,154 +1,172 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Base for test suite for making push/pull operations. |
|
22 | Base for test suite for making push/pull operations. | |
23 |
|
23 | |||
24 | .. important:: |
|
24 | .. important:: | |
25 |
|
25 | |||
26 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started |
|
26 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started | |
27 | to redirect things to stderr instead of stdout. |
|
27 | to redirect things to stderr instead of stdout. | |
28 | """ |
|
28 | """ | |
29 |
|
29 | |||
30 | from os.path import join as jn |
|
30 | from os.path import join as jn | |
31 | from subprocess32 import Popen, PIPE |
|
31 | from subprocess32 import Popen, PIPE | |
32 | import logging |
|
32 | import logging | |
33 | import os |
|
33 | import os | |
34 | import tempfile |
|
34 | import tempfile | |
35 |
|
35 | |||
36 | from rhodecode.tests import GIT_REPO, HG_REPO |
|
36 | from rhodecode.tests import GIT_REPO, HG_REPO | |
37 |
|
37 | |||
38 | DEBUG = True |
|
38 | DEBUG = True | |
39 | RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log') |
|
39 | RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log') | |
40 | REPO_GROUP = 'a_repo_group' |
|
40 | REPO_GROUP = 'a_repo_group' | |
41 | HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO) |
|
41 | HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO) | |
42 | GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO) |
|
42 | GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO) | |
43 |
|
43 | |||
44 | log = logging.getLogger(__name__) |
|
44 | log = logging.getLogger(__name__) | |
45 |
|
45 | |||
46 |
|
46 | |||
47 | class Command(object): |
|
47 | class Command(object): | |
48 |
|
48 | |||
49 | def __init__(self, cwd): |
|
49 | def __init__(self, cwd): | |
50 | self.cwd = cwd |
|
50 | self.cwd = cwd | |
51 | self.process = None |
|
51 | self.process = None | |
52 |
|
52 | |||
53 | def execute(self, cmd, *args): |
|
53 | def execute(self, cmd, *args): | |
54 | """ |
|
54 | """ | |
55 | Runs command on the system with given ``args``. |
|
55 | Runs command on the system with given ``args``. | |
56 | """ |
|
56 | """ | |
57 |
|
57 | |||
58 | command = cmd + ' ' + ' '.join(args) |
|
58 | command = cmd + ' ' + ' '.join(args) | |
59 | if DEBUG: |
|
59 | if DEBUG: | |
60 | log.debug('*** CMD %s ***' % (command,)) |
|
60 | log.debug('*** CMD %s ***' % (command,)) | |
61 |
|
61 | |||
62 | env = dict(os.environ) |
|
62 | env = dict(os.environ) | |
63 | # Delete coverage variables, as they make the test fail for Mercurial |
|
63 | # Delete coverage variables, as they make the test fail for Mercurial | |
64 | for key in env.keys(): |
|
64 | for key in env.keys(): | |
65 | if key.startswith('COV_CORE_'): |
|
65 | if key.startswith('COV_CORE_'): | |
66 | del env[key] |
|
66 | del env[key] | |
67 |
|
67 | |||
68 | self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, |
|
68 | self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, | |
69 | cwd=self.cwd, env=env) |
|
69 | cwd=self.cwd, env=env) | |
70 | stdout, stderr = self.process.communicate() |
|
70 | stdout, stderr = self.process.communicate() | |
71 | if DEBUG: |
|
71 | if DEBUG: | |
72 | log.debug('STDOUT:%s' % (stdout,)) |
|
72 | log.debug('STDOUT:%s' % (stdout,)) | |
73 | log.debug('STDERR:%s' % (stderr,)) |
|
73 | log.debug('STDERR:%s' % (stderr,)) | |
74 | return stdout, stderr |
|
74 | return stdout, stderr | |
75 |
|
75 | |||
76 | def assert_returncode_success(self): |
|
76 | def assert_returncode_success(self): | |
77 | assert self.process.returncode == 0 |
|
77 | assert self.process.returncode == 0 | |
78 |
|
78 | |||
79 |
|
79 | |||
80 | def _add_files_and_push(vcs, dest, clone_url=None, **kwargs): |
|
80 | def _add_files_and_push(vcs, dest, clone_url=None, tags=None, **kwargs): | |
81 | """ |
|
81 | """ | |
82 | Generate some files, add them to the DEST repo and push them back. |
|
82 | Generate some files, add them to the DEST repo and push them back. | |
83 | vcs is 'git' or 'hg' and defines which VCS the files are made for. |
|
83 | vcs is 'git' or 'hg' and defines which VCS the files are made for. | |

84 | """ |
|
84 | """ | |
85 | # commit some stuff into this repo |
|
85 | # commit some stuff into this repo | |
|
86 | tags = tags or [] | |||
86 | cwd = path = jn(dest) |
|
87 | cwd = path = jn(dest) | |
87 | added_file = jn(path, '%ssetup.py' % tempfile._RandomNameSequence().next()) |
|
88 | added_file = jn(path, '%ssetup.py' % tempfile._RandomNameSequence().next()) | |
88 | Command(cwd).execute('touch %s' % added_file) |
|
89 | Command(cwd).execute('touch %s' % added_file) | |
89 | Command(cwd).execute('%s add %s' % (vcs, added_file)) |
|
90 | Command(cwd).execute('%s add %s' % (vcs, added_file)) | |
90 | author_str = 'Marcin Kuźminski <me@email.com>' |
|
91 | author_str = 'Marcin Kuźminski <me@email.com>' | |
91 |
|
92 | |||
92 | git_ident = "git config user.name {} && git config user.email {}".format( |
|
93 | git_ident = "git config user.name {} && git config user.email {}".format( | |
93 | 'Marcin Kuźminski', 'me@email.com') |
|
94 | 'Marcin Kuźminski', 'me@email.com') | |
94 |
|
95 | |||
95 |
for i in |
|
96 | for i in range(kwargs.get('files_no', 3)): | |
96 | cmd = """echo 'added_line%s' >> %s""" % (i, added_file) |
|
97 | cmd = """echo 'added_line%s' >> %s""" % (i, added_file) | |
97 | Command(cwd).execute(cmd) |
|
98 | Command(cwd).execute(cmd) | |
98 | if vcs == 'hg': |
|
99 | if vcs == 'hg': | |
99 | cmd = """hg commit -m 'commited new %s' -u '%s' %s """ % ( |
|
100 | cmd = """hg commit -m 'commited new %s' -u '%s' %s """ % ( | |
100 | i, author_str, added_file |
|
101 | i, author_str, added_file | |
101 | ) |
|
102 | ) | |
102 | elif vcs == 'git': |
|
103 | elif vcs == 'git': | |
103 | cmd = """%s && git commit -m 'commited new %s' %s""" % ( |
|
104 | cmd = """%s && git commit -m 'commited new %s' %s""" % ( | |
104 | git_ident, i, added_file) |
|
105 | git_ident, i, added_file) | |
105 | Command(cwd).execute(cmd) |
|
106 | Command(cwd).execute(cmd) | |
106 |
|
107 | |||
|
108 | for tag in tags: | |||
|
109 | if vcs == 'hg': | |||
|
110 | stdout, stderr = Command(cwd).execute( | |||
|
111 | 'hg tag', tag['name']) | |||
|
112 | elif vcs == 'git': | |||
|
113 | if tag['commit']: | |||
|
114 | # annotated tag | |||
|
115 | stdout, stderr = Command(cwd).execute( | |||
|
116 | """%s && git tag -a %s -m "%s" """ % ( | |||
|
117 | git_ident, tag['name'], tag['commit'])) | |||
|
118 | else: | |||
|
119 | # lightweight tag | |||
|
120 | stdout, stderr = Command(cwd).execute( | |||
|
121 | """%s && git tag %s""" % ( | |||
|
122 | git_ident, tag['name'])) | |||
|
123 | ||||
107 | # PUSH it back |
|
124 | # PUSH it back | |
108 | stdout = stderr = None |
|
125 | stdout = stderr = None | |
109 | if vcs == 'hg': |
|
126 | if vcs == 'hg': | |
110 | stdout, stderr = Command(cwd).execute( |
|
127 | stdout, stderr = Command(cwd).execute( | |
111 | 'hg push --verbose', clone_url) |
|
128 | 'hg push --verbose', clone_url) | |
112 | elif vcs == 'git': |
|
129 | elif vcs == 'git': | |
113 | stdout, stderr = Command(cwd).execute( |
|
130 | stdout, stderr = Command(cwd).execute( | |
114 | """%s && git push --verbose %s master""" % ( |
|
131 | """%s && | |
|
132 | git push --verbose --tags %s master""" % ( | |||
115 | git_ident, clone_url)) |
|
133 | git_ident, clone_url)) | |
116 |
|
134 | |||
117 | return stdout, stderr |
|
135 | return stdout, stderr | |
118 |
|
136 | |||
119 |
|
137 | |||
120 | def _check_proper_git_push( |
|
138 | def _check_proper_git_push( | |
121 | stdout, stderr, branch='master', should_set_default_branch=False): |
|
139 | stdout, stderr, branch='master', should_set_default_branch=False): | |
122 | # Note: Git is writing most information to stderr intentionally |
|
140 | # Note: Git is writing most information to stderr intentionally | |
123 | assert 'fatal' not in stderr |
|
141 | assert 'fatal' not in stderr | |
124 | assert 'rejected' not in stderr |
|
142 | assert 'rejected' not in stderr | |
125 | assert 'Pushing to' in stderr |
|
143 | assert 'Pushing to' in stderr | |
126 | assert '%s -> %s' % (branch, branch) in stderr |
|
144 | assert '%s -> %s' % (branch, branch) in stderr | |
127 |
|
145 | |||
128 | if should_set_default_branch: |
|
146 | if should_set_default_branch: | |
129 | assert "Setting default branch to %s" % branch in stderr |
|
147 | assert "Setting default branch to %s" % branch in stderr | |
130 | else: |
|
148 | else: | |
131 | assert "Setting default branch" not in stderr |
|
149 | assert "Setting default branch" not in stderr | |
132 |
|
150 | |||
133 |
|
151 | |||
134 | def _check_proper_hg_push(stdout, stderr, branch='default'): |
|
152 | def _check_proper_hg_push(stdout, stderr, branch='default'): | |
135 | assert 'pushing to' in stdout |
|
153 | assert 'pushing to' in stdout | |
136 | assert 'searching for changes' in stdout |
|
154 | assert 'searching for changes' in stdout | |
137 |
|
155 | |||
138 | assert 'abort:' not in stderr |
|
156 | assert 'abort:' not in stderr | |
139 |
|
157 | |||
140 |
|
158 | |||
141 | def _check_proper_clone(stdout, stderr, vcs): |
|
159 | def _check_proper_clone(stdout, stderr, vcs): | |
142 | if vcs == 'hg': |
|
160 | if vcs == 'hg': | |
143 | assert 'requesting all changes' in stdout |
|
161 | assert 'requesting all changes' in stdout | |
144 | assert 'adding changesets' in stdout |
|
162 | assert 'adding changesets' in stdout | |
145 | assert 'adding manifests' in stdout |
|
163 | assert 'adding manifests' in stdout | |
146 | assert 'adding file changes' in stdout |
|
164 | assert 'adding file changes' in stdout | |
147 |
|
165 | |||
148 | assert stderr == '' |
|
166 | assert stderr == '' | |
149 |
|
167 | |||
150 | if vcs == 'git': |
|
168 | if vcs == 'git': | |
151 | assert '' == stdout |
|
169 | assert '' == stdout | |
152 | assert 'Cloning into' in stderr |
|
170 | assert 'Cloning into' in stderr | |
153 | assert 'abort:' not in stderr |
|
171 | assert 'abort:' not in stderr | |
154 | assert 'fatal:' not in stderr |
|
172 | assert 'fatal:' not in stderr |
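The new tags parameter added above takes a list of dicts; based on the hunk, each needs a 'name' and, for git, a 'commit' value that selects an annotated tag (truthy) or a lightweight tag (empty). A hedged usage sketch; the destination path and clone URL are placeholders:

    from rhodecode.tests.other.vcs_operations import (
        Command, _add_files_and_push, _check_proper_git_push)

    # push a few generated commits plus one annotated and one lightweight tag
    stdout, stderr = _add_files_and_push(
        'git', dest='/tmp/vcs_test_git_clone',
        clone_url='http://127.0.0.1:5000/vcs_test_git',
        tags=[
            {'name': 'v0.0.1', 'commit': 'annotated tag message'},  # annotated
            {'name': 'tip-tag', 'commit': ''},                      # lightweight
        ])
    _check_proper_git_push(stdout, stderr)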
@@ -1,264 +1,271 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | py.test config for test suite for making push/pull operations. |
|
22 | py.test config for test suite for making push/pull operations. | |
23 |
|
23 | |||
24 | .. important:: |
|
24 | .. important:: | |
25 |
|
25 | |||
26 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started |
|
26 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started | |
27 | to redirect things to stderr instead of stdout. |
|
27 | to redirect things to stderr instead of stdout. | |
28 | """ |
|
28 | """ | |
29 |
|
29 | |||
30 | import ConfigParser |
|
30 | import ConfigParser | |
31 | import os |
|
31 | import os | |
32 | import subprocess32 |
|
32 | import subprocess32 | |
33 | import tempfile |
|
33 | import tempfile | |
34 | import textwrap |
|
34 | import textwrap | |
35 | import pytest |
|
35 | import pytest | |
36 |
|
36 | |||
37 | import rhodecode |
|
37 | import rhodecode | |
38 | from rhodecode.model.db import Repository |
|
38 | from rhodecode.model.db import Repository | |
39 | from rhodecode.model.meta import Session |
|
39 | from rhodecode.model.meta import Session | |
40 | from rhodecode.model.settings import SettingsModel |
|
40 | from rhodecode.model.settings import SettingsModel | |
41 | from rhodecode.tests import ( |
|
41 | from rhodecode.tests import ( | |
42 | GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,) |
|
42 | GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,) | |
43 | from rhodecode.tests.fixture import Fixture |
|
43 | from rhodecode.tests.fixture import Fixture | |
44 | from rhodecode.tests.utils import is_url_reachable, wait_for_url |
|
44 | from rhodecode.tests.utils import is_url_reachable, wait_for_url | |
45 |
|
45 | |||
46 | RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log') |
|
46 | RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log') | |
47 | REPO_GROUP = 'a_repo_group' |
|
47 | REPO_GROUP = 'a_repo_group' | |
48 | HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO) |
|
48 | HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO) | |
49 | GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO) |
|
49 | GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO) | |
50 |
|
50 | |||
51 |
|
51 | |||
52 | def assert_no_running_instance(url): |
|
52 | def assert_no_running_instance(url): | |
53 | if is_url_reachable(url): |
|
53 | if is_url_reachable(url): | |
54 | print("Hint: Usually this means another instance of Enterprise " |
|
54 | print("Hint: Usually this means another instance of Enterprise " | |
55 | "is running in the background.") |
|
55 | "is running in the background.") | |
56 | pytest.fail( |
|
56 | pytest.fail( | |
57 | "Port is not free at %s, cannot start web interface" % url) |
|
57 | "Port is not free at %s, cannot start web interface" % url) | |
58 |
|
58 | |||
59 |
|
59 | |||
|
60 | def get_port(pyramid_config): | |||
|
61 | config = ConfigParser.ConfigParser() | |||
|
62 | config.read(pyramid_config) | |||
|
63 | return config.get('server:main', 'port') | |||
|
64 | ||||
|
65 | ||||
60 | def get_host_url(pyramid_config): |
|
66 | def get_host_url(pyramid_config): | |
61 | """Construct the host url using the port in the test configuration.""" |
|
67 | """Construct the host url using the port in the test configuration.""" | |
62 | config = ConfigParser.ConfigParser() |
|
68 | return '127.0.0.1:%s' % get_port(pyramid_config) | |
63 | config.read(pyramid_config) |
|
|||
64 |
|
||||
65 | return '127.0.0.1:%s' % config.get('server:main', 'port') |
|
|||
66 |
|
69 | |||
67 |
|
70 | |||
68 | class RcWebServer(object): |
|
71 | class RcWebServer(object): | |
69 | """ |
|
72 | """ | |
70 | Represents a running RCE web server used as a test fixture. |
|
73 | Represents a running RCE web server used as a test fixture. | |
71 | """ |
|
74 | """ | |
72 | def __init__(self, pyramid_config): |
|
75 | def __init__(self, pyramid_config, log_file): | |
73 | self.pyramid_config = pyramid_config |
|
76 | self.pyramid_config = pyramid_config | |
|
77 | self.log_file = log_file | |||
74 |
|
78 | |||
75 | def repo_clone_url(self, repo_name, **kwargs): |
|
79 | def repo_clone_url(self, repo_name, **kwargs): | |
76 | params = { |
|
80 | params = { | |
77 | 'user': TEST_USER_ADMIN_LOGIN, |
|
81 | 'user': TEST_USER_ADMIN_LOGIN, | |
78 | 'passwd': TEST_USER_ADMIN_PASS, |
|
82 | 'passwd': TEST_USER_ADMIN_PASS, | |
79 | 'host': get_host_url(self.pyramid_config), |
|
83 | 'host': get_host_url(self.pyramid_config), | |
80 | 'cloned_repo': repo_name, |
|
84 | 'cloned_repo': repo_name, | |
81 | } |
|
85 | } | |
82 | params.update(**kwargs) |
|
86 | params.update(**kwargs) | |
83 | _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params |
|
87 | _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params | |
84 | return _url |
|
88 | return _url | |
85 |
|
89 | |||
86 | def host_url(self): |
|
90 | def host_url(self): | |
87 | return 'http://' + get_host_url(self.pyramid_config) |
|
91 | return 'http://' + get_host_url(self.pyramid_config) | |
88 |
|
92 | |||
|
93 | def get_rc_log(self): | |||
|
94 | with open(self.log_file) as f: | |||
|
95 | return f.read() | |||
|
96 | ||||
89 |
|
97 | |||
90 | @pytest.fixture(scope="module") |
|
98 | @pytest.fixture(scope="module") | |
91 | def rcextensions(request, baseapp, tmpdir_factory): |
|
99 | def rcextensions(request, baseapp, tmpdir_factory): | |
92 | """ |
|
100 | """ | |
93 | Installs a testing rcextensions pack to ensure they work as expected. |
|
101 | Installs a testing rcextensions pack to ensure they work as expected. | |
94 | """ |
|
102 | """ | |
95 | init_content = textwrap.dedent(""" |
|
103 | init_content = textwrap.dedent(""" | |
96 | # Forward import the example rcextensions to make it |
|
104 | # Forward import the example rcextensions to make it | |
97 | # active for our tests. |
|
105 | # active for our tests. | |
98 | from rhodecode.tests.other.example_rcextensions import * |
|
106 | from rhodecode.tests.other.example_rcextensions import * | |
99 | """) |
|
107 | """) | |
100 |
|
108 | |||
101 | # Note: rcextensions are looked up based on the path of the ini file |
|
109 | # Note: rcextensions are looked up based on the path of the ini file | |
102 | root_path = tmpdir_factory.getbasetemp() |
|
110 | root_path = tmpdir_factory.getbasetemp() | |
103 | rcextensions_path = root_path.join('rcextensions') |
|
111 | rcextensions_path = root_path.join('rcextensions') | |
104 | init_path = rcextensions_path.join('__init__.py') |
|
112 | init_path = rcextensions_path.join('__init__.py') | |
105 |
|
113 | |||
106 | if rcextensions_path.check(): |
|
114 | if rcextensions_path.check(): | |
107 | pytest.fail( |
|
115 | pytest.fail( | |
108 | "Path for rcextensions already exists, please clean up before " |
|
116 | "Path for rcextensions already exists, please clean up before " | |
109 | "test run this path: %s" % (rcextensions_path, )) |
|
117 | "test run this path: %s" % (rcextensions_path, )) | |
110 | return |
|
118 | return | |
111 |
|
119 | |||
112 | request.addfinalizer(rcextensions_path.remove) |
|
120 | request.addfinalizer(rcextensions_path.remove) | |
113 | init_path.write_binary(init_content, ensure=True) |
|
121 | init_path.write_binary(init_content, ensure=True) | |
114 |
|
122 | |||
115 |
|
123 | |||
116 | @pytest.fixture(scope="module") |
|
124 | @pytest.fixture(scope="module") | |
117 | def repos(request, baseapp): |
|
125 | def repos(request, baseapp): | |
118 | """Create a copy of each test repo in a repo group.""" |
|
126 | """Create a copy of each test repo in a repo group.""" | |
119 | fixture = Fixture() |
|
127 | fixture = Fixture() | |
120 | repo_group = fixture.create_repo_group(REPO_GROUP) |
|
128 | repo_group = fixture.create_repo_group(REPO_GROUP) | |
121 | repo_group_id = repo_group.group_id |
|
129 | repo_group_id = repo_group.group_id | |
122 | fixture.create_fork(HG_REPO, HG_REPO, |
|
130 | fixture.create_fork(HG_REPO, HG_REPO, | |
123 | repo_name_full=HG_REPO_WITH_GROUP, |
|
131 | repo_name_full=HG_REPO_WITH_GROUP, | |
124 | repo_group=repo_group_id) |
|
132 | repo_group=repo_group_id) | |
125 | fixture.create_fork(GIT_REPO, GIT_REPO, |
|
133 | fixture.create_fork(GIT_REPO, GIT_REPO, | |
126 | repo_name_full=GIT_REPO_WITH_GROUP, |
|
134 | repo_name_full=GIT_REPO_WITH_GROUP, | |
127 | repo_group=repo_group_id) |
|
135 | repo_group=repo_group_id) | |
128 |
|
136 | |||
129 | @request.addfinalizer |
|
137 | @request.addfinalizer | |
130 | def cleanup(): |
|
138 | def cleanup(): | |
131 | fixture.destroy_repo(HG_REPO_WITH_GROUP) |
|
139 | fixture.destroy_repo(HG_REPO_WITH_GROUP) | |
132 | fixture.destroy_repo(GIT_REPO_WITH_GROUP) |
|
140 | fixture.destroy_repo(GIT_REPO_WITH_GROUP) | |
133 | fixture.destroy_repo_group(repo_group_id) |
|
141 | fixture.destroy_repo_group(repo_group_id) | |
134 |
|
142 | |||
135 |
|
143 | |||
136 | @pytest.fixture(scope="module") |
|
144 | @pytest.fixture(scope="module") | |
137 | def rc_web_server_config(testini_factory): |
|
145 | def rc_web_server_config(testini_factory): | |
138 | """ |
|
146 | """ | |
139 | Configuration file used for the fixture `rc_web_server`. |
|
147 | Configuration file used for the fixture `rc_web_server`. | |
140 | """ |
|
148 | """ | |
141 | CUSTOM_PARAMS = [ |
|
149 | CUSTOM_PARAMS = [ | |
142 | {'handler_console': {'level': 'DEBUG'}}, |
|
150 | {'handler_console': {'level': 'DEBUG'}}, | |
143 | ] |
|
151 | ] | |
144 | return testini_factory(CUSTOM_PARAMS) |
|
152 | return testini_factory(CUSTOM_PARAMS) | |
145 |
|
153 | |||
146 |
|
154 | |||
147 | @pytest.fixture(scope="module") |
|
155 | @pytest.fixture(scope="module") | |
148 | def rc_web_server( |
|
156 | def rc_web_server( | |
149 | request, baseapp, rc_web_server_config, repos, rcextensions): |
|
157 | request, baseapp, rc_web_server_config, repos, rcextensions): | |
150 | """ |
|
158 | """ | |
151 | Run the web server as a subprocess. |
|
159 | Run the web server as a subprocess. | |
152 |
|
160 | |||
153 | Since a vcsserver is already running, it is not spawned again. |
|
161 | Since a vcsserver is already running, it is not spawned again. | |
154 | """ |
|
162 | """ | |
155 | env = os.environ.copy() |
|
163 | env = os.environ.copy() | |
156 | env['RC_NO_TMP_PATH'] = '1' |
|
164 | env['RC_NO_TMP_PATH'] = '1' | |
157 |
|
165 | |||
158 | rc_log = RC_LOG |
|
166 | rc_log = list(RC_LOG.partition('.log')) | |
159 | server_out = open(rc_log, 'w') |
|
167 | rc_log.insert(1, get_port(rc_web_server_config)) | |
|
168 | rc_log = ''.join(rc_log) | |||
160 |
|
169 | |||
161 | # TODO: Would be great to capture the output and err of the subprocess |
|
170 | server_out = open(rc_log, 'w') | |
162 | # and make it available in a section of the py.test report in case of an |
|
|||
163 | # error. |
|
|||
164 |
|
171 | |||
165 | host_url = 'http://' + get_host_url(rc_web_server_config) |
|
172 | host_url = 'http://' + get_host_url(rc_web_server_config) | |
166 | assert_no_running_instance(host_url) |
|
173 | assert_no_running_instance(host_url) | |
167 | command = ['pserve', rc_web_server_config] |
|
174 | command = ['pserve', rc_web_server_config] | |
168 |
|
175 | |||
169 | print('Starting rhodecode server: {}'.format(host_url)) |
|
176 | print('Starting rhodecode server: {}'.format(host_url)) | |
170 | print('Command: {}'.format(command)) |
|
177 | print('Command: {}'.format(command)) | |
171 | print('Logfile: {}'.format(rc_log)) |
|
178 | print('Logfile: {}'.format(rc_log)) | |
172 |
|
179 | |||
173 | proc = subprocess32.Popen( |
|
180 | proc = subprocess32.Popen( | |
174 | command, bufsize=0, env=env, stdout=server_out, stderr=server_out) |
|
181 | command, bufsize=0, env=env, stdout=server_out, stderr=server_out) | |
175 |
|
182 | |||
176 | wait_for_url(host_url, timeout=30) |
|
183 | wait_for_url(host_url, timeout=30) | |
177 |
|
184 | |||
178 | @request.addfinalizer |
|
185 | @request.addfinalizer | |
179 | def stop_web_server(): |
|
186 | def stop_web_server(): | |
180 | # TODO: Find out how to integrate with the reporting of py.test to |
|
187 | # TODO: Find out how to integrate with the reporting of py.test to | |
181 | # make this information available. |
|
188 | # make this information available. | |
182 | print("\nServer log file written to %s" % (rc_log, )) |
|
189 | print("\nServer log file written to %s" % (rc_log, )) | |
183 | proc.kill() |
|
190 | proc.kill() | |
184 | server_out.flush() |
|
191 | server_out.flush() | |
185 | server_out.close() |
|
192 | server_out.close() | |
186 |
|
193 | |||
187 | return RcWebServer(rc_web_server_config) |
|
194 | return RcWebServer(rc_web_server_config, log_file=rc_log) | |
188 |
|
195 | |||
189 |
|
196 | |||
190 | @pytest.fixture |
|
197 | @pytest.fixture | |
191 | def disable_locking(baseapp): |
|
198 | def disable_locking(baseapp): | |
192 | r = Repository.get_by_repo_name(GIT_REPO) |
|
199 | r = Repository.get_by_repo_name(GIT_REPO) | |
193 | Repository.unlock(r) |
|
200 | Repository.unlock(r) | |
194 | r.enable_locking = False |
|
201 | r.enable_locking = False | |
195 | Session().add(r) |
|
202 | Session().add(r) | |
196 | Session().commit() |
|
203 | Session().commit() | |
197 |
|
204 | |||
198 | r = Repository.get_by_repo_name(HG_REPO) |
|
205 | r = Repository.get_by_repo_name(HG_REPO) | |
199 | Repository.unlock(r) |
|
206 | Repository.unlock(r) | |
200 | r.enable_locking = False |
|
207 | r.enable_locking = False | |
201 | Session().add(r) |
|
208 | Session().add(r) | |
202 | Session().commit() |
|
209 | Session().commit() | |
203 |
|
210 | |||
204 |
|
211 | |||
205 | @pytest.fixture |
|
212 | @pytest.fixture | |
206 | def enable_auth_plugins(request, baseapp, csrf_token): |
|
213 | def enable_auth_plugins(request, baseapp, csrf_token): | |
207 | """ |
|
214 | """ | |
208 | Return a factory object that, when called, controls which |
|
215 | Return a factory object that, when called, controls which | |
209 | authentication plugins are enabled. |
|
216 | authentication plugins are enabled. | |
210 | """ |
|
217 | """ | |
211 | def _enable_plugins(plugins_list, override=None): |
|
218 | def _enable_plugins(plugins_list, override=None): | |
212 | override = override or {} |
|
219 | override = override or {} | |
213 | params = { |
|
220 | params = { | |
214 | 'auth_plugins': ','.join(plugins_list), |
|
221 | 'auth_plugins': ','.join(plugins_list), | |
215 | } |
|
222 | } | |
216 |
|
223 | |||
217 | # helper translate some names to others |
|
224 | # helper translate some names to others | |
218 | name_map = { |
|
225 | name_map = { | |
219 | 'token': 'authtoken' |
|
226 | 'token': 'authtoken' | |
220 | } |
|
227 | } | |
221 |
|
228 | |||
222 | for module in plugins_list: |
|
229 | for module in plugins_list: | |
223 | plugin_name = module.partition('#')[-1] |
|
230 | plugin_name = module.partition('#')[-1] | |
224 | if plugin_name in name_map: |
|
231 | if plugin_name in name_map: | |
225 | plugin_name = name_map[plugin_name] |
|
232 | plugin_name = name_map[plugin_name] | |
226 | enabled_plugin = 'auth_%s_enabled' % plugin_name |
|
233 | enabled_plugin = 'auth_%s_enabled' % plugin_name | |
227 | cache_ttl = 'auth_%s_cache_ttl' % plugin_name |
|
234 | cache_ttl = 'auth_%s_cache_ttl' % plugin_name | |
228 |
|
235 | |||
229 | # default params that are needed for each plugin, |
|
236 | # default params that are needed for each plugin, | |
230 | # `enabled` and `cache_ttl` |
|
237 | # `enabled` and `cache_ttl` | |
231 | params.update({ |
|
238 | params.update({ | |
232 | enabled_plugin: True, |
|
239 | enabled_plugin: True, | |
233 | cache_ttl: 0 |
|
240 | cache_ttl: 0 | |
234 | }) |
|
241 | }) | |
235 | if override.get: |
|
242 | if override.get: | |
236 | params.update(override.get(module, {})) |
|
243 | params.update(override.get(module, {})) | |
237 |
|
244 | |||
238 | validated_params = params |
|
245 | validated_params = params | |
239 | for k, v in validated_params.items(): |
|
246 | for k, v in validated_params.items(): | |
240 | setting = SettingsModel().create_or_update_setting(k, v) |
|
247 | setting = SettingsModel().create_or_update_setting(k, v) | |
241 | Session().add(setting) |
|
248 | Session().add(setting) | |
242 | Session().commit() |
|
249 | Session().commit() | |
243 |
|
250 | |||
244 | def cleanup(): |
|
251 | def cleanup(): | |
245 | _enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode']) |
|
252 | _enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode']) | |
246 |
|
253 | |||
247 | request.addfinalizer(cleanup) |
|
254 | request.addfinalizer(cleanup) | |
248 |
|
255 | |||
249 | return _enable_plugins |
|
256 | return _enable_plugins | |
250 |
|
257 | |||
251 |
|
258 | |||
252 | @pytest.fixture |
|
259 | @pytest.fixture | |
253 | def fs_repo_only(request, rhodecode_fixtures): |
|
260 | def fs_repo_only(request, rhodecode_fixtures): | |
254 | def fs_repo_fabric(repo_name, repo_type): |
|
261 | def fs_repo_fabric(repo_name, repo_type): | |
255 | rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type) |
|
262 | rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type) | |
256 | rhodecode_fixtures.destroy_repo(repo_name, fs_remove=False) |
|
263 | rhodecode_fixtures.destroy_repo(repo_name, fs_remove=False) | |
257 |
|
264 | |||
258 | def cleanup(): |
|
265 | def cleanup(): | |
259 | rhodecode_fixtures.destroy_repo(repo_name, fs_remove=True) |
|
266 | rhodecode_fixtures.destroy_repo(repo_name, fs_remove=True) | |
260 | rhodecode_fixtures.destroy_repo_on_filesystem(repo_name) |
|
267 | rhodecode_fixtures.destroy_repo_on_filesystem(repo_name) | |
261 |
|
268 | |||
262 | request.addfinalizer(cleanup) |
|
269 | request.addfinalizer(cleanup) | |
263 |
|
270 | |||
264 | return fs_repo_fabric |
|
271 | return fs_repo_fabric |
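Taken together, the conftest changes (per-port log file, log_file on RcWebServer, get_rc_log) let a test inspect the server log after a push. A rough sketch of how a test might use the updated fixture; the imports, repo, and the asserted log fragment are illustrative only:

    import os

    from rhodecode.tests import GIT_REPO
    from rhodecode.tests.other.vcs_operations import (
        Command, _add_files_and_push, _check_proper_git_push)


    def test_push_with_server_log(rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(GIT_REPO)
        Command(os.path.dirname(tmpdir.strpath)).execute(
            'git clone', clone_url, tmpdir.strpath)

        stdout, stderr = _add_files_and_push(
            'git', tmpdir.strpath, clone_url=clone_url)
        _check_proper_git_push(stdout, stderr)

        # new in this changeset: read the per-port server log for assertions
        rc_log = rc_web_server.get_rc_log()
        assert 'Traceback' not in rc_log  # illustrative check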