The requested changes are too big and content was truncated.
@@ -1,113 +1,112 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2010-2017 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import pytest

 from rhodecode.model.db import UserLog
 from rhodecode.model.pull_request import PullRequestModel
 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
 from rhodecode.api.tests.utils import (
     build_data, api_call, assert_error, assert_ok)


 @pytest.mark.usefixtures("testuser_api", "app")
 class TestClosePullRequest(object):

     @pytest.mark.backends("git", "hg")
     def test_api_close_pull_request(self, pr_util):
         pull_request = pr_util.create_pull_request()
         pull_request_id = pull_request.pull_request_id
         author = pull_request.user_id
         repo = pull_request.target_repo.repo_id
         id_, params = build_data(
             self.apikey, 'close_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id)
         response = api_call(self.app, params)
         expected = {
             'pull_request_id': pull_request_id,
             'close_status': 'Rejected',
             'closed': True,
         }
         assert_ok(id_, expected, response.body)
-        action = 'user_closed_pull_request:%d' % pull_request_id
         journal = UserLog.query()\
-            .filter(UserLog.user_id == author)\
+            .filter(UserLog.user_id == author) \
+            .order_by('user_log_id') \
             .filter(UserLog.repository_id == repo)\
-            .filter(UserLog.action == action)\
             .all()
-        assert len(journal) == 1
+        assert journal[-1].action == 'repo.pull_request.close'

     @pytest.mark.backends("git", "hg")
     def test_api_close_pull_request_already_closed_error(self, pr_util):
         pull_request = pr_util.create_pull_request()
         pull_request_id = pull_request.pull_request_id
         pull_request_repo = pull_request.target_repo.repo_name
         PullRequestModel().close_pull_request(
             pull_request, pull_request.author)
         id_, params = build_data(
             self.apikey, 'close_pull_request',
             repoid=pull_request_repo, pullrequestid=pull_request_id)
         response = api_call(self.app, params)

         expected = 'pull request `%s` is already closed' % pull_request_id
         assert_error(id_, expected, given=response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_close_pull_request_repo_error(self):
         id_, params = build_data(
             self.apikey, 'close_pull_request',
             repoid=666, pullrequestid=1)
         response = api_call(self.app, params)

         expected = 'repository `666` does not exist'
         assert_error(id_, expected, given=response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_close_pull_request_non_admin_with_userid_error(self,
                                                                 pr_util):
         pull_request = pr_util.create_pull_request()
         id_, params = build_data(
             self.apikey_regular, 'close_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             userid=TEST_USER_ADMIN_LOGIN)
         response = api_call(self.app, params)

         expected = 'userid is not the same as your user'
         assert_error(id_, expected, given=response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_close_pull_request_no_perms_to_close(
             self, user_util, pr_util):
         user = user_util.create_user()
         pull_request = pr_util.create_pull_request()

         id_, params = build_data(
             user.api_key, 'close_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,)
         response = api_call(self.app, params)

         expected = ('pull request `%s` close failed, '
                     'no permission to close.') % pull_request.pull_request_id

         response_json = response.json['error']
         assert response_json == expected
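
Note: the hunk above replaces the legacy `user_closed_pull_request:<id>` UserLog action strings with the new `repo.pull_request.close` audit-log actions. For reference, a minimal sketch of making the same close_pull_request call against a live instance over JSON-RPC; the server URL, token, repository name and pull request id are placeholders, not values from this changeset:

    import json
    import urllib2

    payload = {
        'id': 1,
        'auth_token': 'SECRET_TOKEN',  # placeholder API token
        'method': 'close_pull_request',
        'args': {'repoid': 'my-repo', 'pullrequestid': 42},  # placeholders
    }
    req = urllib2.Request(
        'https://code.example.com/_admin/api',  # placeholder instance URL
        data=json.dumps(payload),
        headers={'Content-Type': 'application/json'})
    reply = json.loads(urllib2.urlopen(req).read())
    # on success, result mirrors the test's expected dict, e.g.
    # {'pull_request_id': 42, 'close_status': 'Rejected', 'closed': True}
    print reply['result']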
@@ -1,209 +1,208 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2010-2017 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import pytest

 from rhodecode.model.comment import CommentsModel
 from rhodecode.model.db import UserLog
 from rhodecode.model.pull_request import PullRequestModel
 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
 from rhodecode.api.tests.utils import (
     build_data, api_call, assert_error, assert_ok)


 @pytest.mark.usefixtures("testuser_api", "app")
 class TestCommentPullRequest(object):
     finalizers = []

     def teardown_method(self, method):
         if self.finalizers:
             for finalizer in self.finalizers:
                 finalizer()
             self.finalizers = []

     @pytest.mark.backends("git", "hg")
     def test_api_comment_pull_request(self, pr_util, no_notifications):
         pull_request = pr_util.create_pull_request()
         pull_request_id = pull_request.pull_request_id
         author = pull_request.user_id
         repo = pull_request.target_repo.repo_id
         id_, params = build_data(
             self.apikey, 'comment_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             message='test message')
         response = api_call(self.app, params)
         pull_request = PullRequestModel().get(pull_request.pull_request_id)

         comments = CommentsModel().get_comments(
             pull_request.target_repo.repo_id, pull_request=pull_request)

         expected = {
             'pull_request_id': pull_request.pull_request_id,
             'comment_id': comments[-1].comment_id,
             'status': {'given': None, 'was_changed': None}
         }
         assert_ok(id_, expected, response.body)

-        action = 'user_commented_pull_request:%d' % pull_request_id
         journal = UserLog.query()\
             .filter(UserLog.user_id == author)\
-            .filter(UserLog.repository_id == repo)\
-            .filter(UserLog.action == action)\
+            .filter(UserLog.repository_id == repo) \
+            .order_by('user_log_id') \
             .all()
-        assert len(journal) == 2
+        assert journal[-1].action == 'repo.pull_request.comment.create'

     @pytest.mark.backends("git", "hg")
     def test_api_comment_pull_request_change_status(
             self, pr_util, no_notifications):
         pull_request = pr_util.create_pull_request()
         pull_request_id = pull_request.pull_request_id
         id_, params = build_data(
             self.apikey, 'comment_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             status='rejected')
         response = api_call(self.app, params)
         pull_request = PullRequestModel().get(pull_request_id)

         comments = CommentsModel().get_comments(
             pull_request.target_repo.repo_id, pull_request=pull_request)
         expected = {
             'pull_request_id': pull_request.pull_request_id,
             'comment_id': comments[-1].comment_id,
             'status': {'given': 'rejected', 'was_changed': True}
         }
         assert_ok(id_, expected, response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_comment_pull_request_change_status_with_specific_commit_id(
             self, pr_util, no_notifications):
         pull_request = pr_util.create_pull_request()
         pull_request_id = pull_request.pull_request_id
         latest_commit_id = 'test_commit'
         # inject additional revision, to fail test the status change on
         # non-latest commit
         pull_request.revisions = pull_request.revisions + ['test_commit']

         id_, params = build_data(
             self.apikey, 'comment_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             status='approved', commit_id=latest_commit_id)
         response = api_call(self.app, params)
         pull_request = PullRequestModel().get(pull_request_id)

         expected = {
             'pull_request_id': pull_request.pull_request_id,
             'comment_id': None,
             'status': {'given': 'approved', 'was_changed': False}
         }
         assert_ok(id_, expected, response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_comment_pull_request_change_status_with_specific_commit_id(
             self, pr_util, no_notifications):
         pull_request = pr_util.create_pull_request()
         pull_request_id = pull_request.pull_request_id
         latest_commit_id = pull_request.revisions[0]

         id_, params = build_data(
             self.apikey, 'comment_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             status='approved', commit_id=latest_commit_id)
         response = api_call(self.app, params)
         pull_request = PullRequestModel().get(pull_request_id)

         comments = CommentsModel().get_comments(
             pull_request.target_repo.repo_id, pull_request=pull_request)
         expected = {
             'pull_request_id': pull_request.pull_request_id,
             'comment_id': comments[-1].comment_id,
             'status': {'given': 'approved', 'was_changed': True}
         }
         assert_ok(id_, expected, response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_comment_pull_request_missing_params_error(self, pr_util):
         pull_request = pr_util.create_pull_request()
         pull_request_id = pull_request.pull_request_id
         pull_request_repo = pull_request.target_repo.repo_name
         id_, params = build_data(
             self.apikey, 'comment_pull_request',
             repoid=pull_request_repo,
             pullrequestid=pull_request_id)
         response = api_call(self.app, params)

         expected = 'Both message and status parameters are missing. At least one is required.'
         assert_error(id_, expected, given=response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_comment_pull_request_unknown_status_error(self, pr_util):
         pull_request = pr_util.create_pull_request()
         pull_request_id = pull_request.pull_request_id
         pull_request_repo = pull_request.target_repo.repo_name
         id_, params = build_data(
             self.apikey, 'comment_pull_request',
             repoid=pull_request_repo,
             pullrequestid=pull_request_id,
             status='42')
         response = api_call(self.app, params)

         expected = 'Unknown comment status: `42`'
         assert_error(id_, expected, given=response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_comment_pull_request_repo_error(self):
         id_, params = build_data(
             self.apikey, 'comment_pull_request',
             repoid=666, pullrequestid=1)
         response = api_call(self.app, params)

         expected = 'repository `666` does not exist'
         assert_error(id_, expected, given=response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_comment_pull_request_non_admin_with_userid_error(
             self, pr_util):
         pull_request = pr_util.create_pull_request()
         id_, params = build_data(
             self.apikey_regular, 'comment_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             userid=TEST_USER_ADMIN_LOGIN)
         response = api_call(self.app, params)

         expected = 'userid is not the same as your user'
         assert_error(id_, expected, given=response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_comment_pull_request_wrong_commit_id_error(self, pr_util):
         pull_request = pr_util.create_pull_request()
         id_, params = build_data(
             self.apikey_regular, 'comment_pull_request',
             repoid=pull_request.target_repo.repo_name,
             status='approved',
             pullrequestid=pull_request.pull_request_id,
             commit_id='XXX')
         response = api_call(self.app, params)

         expected = 'Invalid commit_id `XXX` for this pull request.'
         assert_error(id_, expected, given=response.body)
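
Note: one endpoint handles both plain comments and review-status changes. A minimal sketch of a status-changing call reusing the suite's helpers; apikey, app and the ids are assumed to be bound as in the tests above:

    id_, params = build_data(
        apikey, 'comment_pull_request',
        repoid='my-repo', pullrequestid=42,  # placeholder ids
        message='looks good', status='approved')
    response = api_call(app, params)
    # 'was_changed' comes back False when the status could not be applied,
    # e.g. when commit_id points at a non-latest revision of the PR
    print response.json['result']['status']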
@@ -1,134 +1,134 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2010-2017 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/


 import pytest
 import urlobject
 from pylons import url

 from rhodecode.api.tests.utils import (
     build_data, api_call, assert_error, assert_ok)
 from rhodecode.lib.utils2 import safe_unicode

 pytestmark = pytest.mark.backends("git", "hg")


 @pytest.mark.usefixtures("testuser_api", "app")
 class TestGetPullRequest(object):

-    def test_api_get_pull_request(self, pr_util, http_host_stub):
+    def test_api_get_pull_request(self, pr_util, http_host_only_stub):
         from rhodecode.model.pull_request import PullRequestModel
         pull_request = pr_util.create_pull_request(mergeable=True)
         id_, params = build_data(
             self.apikey, 'get_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id)

         response = api_call(self.app, params)

         assert response.status == '200 OK'

         url_obj = urlobject.URLObject(
             url(
                 'pullrequest_show',
                 repo_name=pull_request.target_repo.repo_name,
                 pull_request_id=pull_request.pull_request_id, qualified=True))

         pr_url = safe_unicode(
-            url_obj.with_netloc(http_host_stub))
+            url_obj.with_netloc(http_host_only_stub))
         source_url = safe_unicode(
             pull_request.source_repo.clone_url().with_netloc(http_host_only_stub))
         target_url = safe_unicode(
             pull_request.target_repo.clone_url().with_netloc(http_host_only_stub))
         shadow_url = safe_unicode(
             PullRequestModel().get_shadow_clone_url(pull_request))

         expected = {
             'pull_request_id': pull_request.pull_request_id,
             'url': pr_url,
             'title': pull_request.title,
             'description': pull_request.description,
             'status': pull_request.status,
             'created_on': pull_request.created_on,
             'updated_on': pull_request.updated_on,
             'commit_ids': pull_request.revisions,
             'review_status': pull_request.calculated_review_status(),
             'mergeable': {
                 'status': True,
                 'message': 'This pull request can be automatically merged.',
             },
             'source': {
                 'clone_url': source_url,
                 'repository': pull_request.source_repo.repo_name,
                 'reference': {
                     'name': pull_request.source_ref_parts.name,
                     'type': pull_request.source_ref_parts.type,
                     'commit_id': pull_request.source_ref_parts.commit_id,
                 },
             },
             'target': {
                 'clone_url': target_url,
                 'repository': pull_request.target_repo.repo_name,
                 'reference': {
                     'name': pull_request.target_ref_parts.name,
                     'type': pull_request.target_ref_parts.type,
                     'commit_id': pull_request.target_ref_parts.commit_id,
                 },
             },
             'merge': {
                 'clone_url': shadow_url,
                 'reference': {
                     'name': pull_request.shadow_merge_ref.name,
                     'type': pull_request.shadow_merge_ref.type,
                     'commit_id': pull_request.shadow_merge_ref.commit_id,
                 },
             },
             'author': pull_request.author.get_api_data(include_secrets=False,
                                                        details='basic'),
             'reviewers': [
                 {
                     'user': reviewer.get_api_data(include_secrets=False,
                                                   details='basic'),
                     'reasons': reasons,
                     'review_status': st[0][1].status if st else 'not_reviewed',
                 }
                 for reviewer, reasons, mandatory, st in
                 pull_request.reviewers_statuses()
             ]
         }
         assert_ok(id_, expected, response.body)

     def test_api_get_pull_request_repo_error(self):
         id_, params = build_data(
             self.apikey, 'get_pull_request',
             repoid=666, pullrequestid=1)
         response = api_call(self.app, params)

         expected = 'repository `666` does not exist'
         assert_error(id_, expected, given=response.body)

     def test_api_get_pull_request_pull_request_error(self):
         id_, params = build_data(
             self.apikey, 'get_pull_request',
             repoid=1, pullrequestid=666)
         response = api_call(self.app, params)

         expected = 'pull request `666` does not exist'
         assert_error(id_, expected, given=response.body)
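
Note: the renamed fixture feeds urlobject's with_netloc(), which swaps only the host:port part of a URL while keeping scheme and path intact; a standalone sketch with made-up hosts:

    import urlobject

    u = urlobject.URLObject('http://example.com/my-repo/pull-request/7')
    # with_netloc() returns a new URLObject; the original is unchanged
    print u.with_netloc('testhost:8080')
    # -> http://testhost:8080/my-repo/pull-request/7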
@@ -1,136 +1,136 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2010-2017 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import pytest

 from rhodecode.model.db import UserLog, PullRequest
 from rhodecode.model.meta import Session
 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
 from rhodecode.api.tests.utils import (
     build_data, api_call, assert_error, assert_ok)


 @pytest.mark.usefixtures("testuser_api", "app")
 class TestMergePullRequest(object):
     @pytest.mark.backends("git", "hg")
     def test_api_merge_pull_request_merge_failed(self, pr_util, no_notifications):
         pull_request = pr_util.create_pull_request(mergeable=True)
         author = pull_request.user_id
         repo = pull_request.target_repo.repo_id
         pull_request_id = pull_request.pull_request_id
         pull_request_repo = pull_request.target_repo.repo_name

         id_, params = build_data(
             self.apikey, 'merge_pull_request',
             repoid=pull_request_repo,
             pullrequestid=pull_request_id)

         response = api_call(self.app, params)

         # The above api call detaches the pull request DB object from the
         # session because of an unconditional transaction rollback in our
         # middleware. Therefore we need to add it back here if we want to use
         # it.
         Session().add(pull_request)

         expected = 'merge not possible for following reasons: ' \
                    'Pull request reviewer approval is pending.'
         assert_error(id_, expected, given=response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_merge_pull_request(self, pr_util, no_notifications):
         pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
         author = pull_request.user_id
         repo = pull_request.target_repo.repo_id
         pull_request_id = pull_request.pull_request_id
         pull_request_repo = pull_request.target_repo.repo_name

         id_, params = build_data(
             self.apikey, 'comment_pull_request',
             repoid=pull_request_repo,
             pullrequestid=pull_request_id,
             status='approved')

         response = api_call(self.app, params)
         expected = {
             'comment_id': response.json.get('result', {}).get('comment_id'),
             'pull_request_id': pull_request_id,
             'status': {'given': 'approved', 'was_changed': True}
         }
         assert_ok(id_, expected, given=response.body)

         id_, params = build_data(
             self.apikey, 'merge_pull_request',
             repoid=pull_request_repo,
             pullrequestid=pull_request_id)

         response = api_call(self.app, params)

         pull_request = PullRequest.get(pull_request_id)

         expected = {
             'executed': True,
             'failure_reason': 0,
             'possible': True,
             'merge_commit_id': pull_request.shadow_merge_ref.commit_id,
             'merge_ref': pull_request.shadow_merge_ref._asdict()
         }

         assert_ok(id_, expected, response.body)

-        action = 'user_merged_pull_request:%d' % (pull_request_id, )
         journal = UserLog.query()\
             .filter(UserLog.user_id == author)\
-            .filter(UserLog.repository_id == repo)\
-            .filter(UserLog.action == action)\
+            .filter(UserLog.repository_id == repo) \
+            .order_by('user_log_id') \
             .all()
-        assert len(journal) == 1
+        assert journal[-2].action == 'repo.pull_request.merge'
+        assert journal[-1].action == 'repo.pull_request.close'

         id_, params = build_data(
             self.apikey, 'merge_pull_request',
             repoid=pull_request_repo, pullrequestid=pull_request_id)
         response = api_call(self.app, params)

         expected = 'merge not possible for following reasons: This pull request is closed.'
         assert_error(id_, expected, given=response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_merge_pull_request_repo_error(self):
         id_, params = build_data(
             self.apikey, 'merge_pull_request',
             repoid=666, pullrequestid=1)
         response = api_call(self.app, params)

         expected = 'repository `666` does not exist'
         assert_error(id_, expected, given=response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_merge_pull_request_non_admin_with_userid_error(self,
                                                                 pr_util):
         pull_request = pr_util.create_pull_request(mergeable=True)
         id_, params = build_data(
             self.apikey_regular, 'merge_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             userid=TEST_USER_ADMIN_LOGIN)
         response = api_call(self.app, params)

         expected = 'userid is not the same as your user'
         assert_error(id_, expected, given=response.body)
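
Note: the rewritten assertions encode that a successful API merge also closes the pull request, so the audit journal gains two entries; ordered oldest-first by user_log_id, the merge entry sits just before the final close entry. A sketch of inspecting the journal tail outside the test (author_id and repo_id assumed bound):

    journal = UserLog.query() \
        .filter(UserLog.user_id == author_id) \
        .filter(UserLog.repository_id == repo_id) \
        .order_by('user_log_id') \
        .all()
    # last two audit actions after a merge through the API
    print [entry.action for entry in journal[-2:]]
    # -> ['repo.pull_request.merge', 'repo.pull_request.close']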
@@ -1,213 +1,212 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2010-2017 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import pytest

 from rhodecode.lib.vcs.nodes import FileNode
 from rhodecode.model.db import User
 from rhodecode.model.pull_request import PullRequestModel
 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
 from rhodecode.api.tests.utils import (
     build_data, api_call, assert_ok, assert_error)


 @pytest.mark.usefixtures("testuser_api", "app")
 class TestUpdatePullRequest(object):

     @pytest.mark.backends("git", "hg")
     def test_api_update_pull_request_title_or_description(
-            self, pr_util, silence_action_logger, no_notifications):
+            self, pr_util, no_notifications):
         pull_request = pr_util.create_pull_request()

         id_, params = build_data(
             self.apikey, 'update_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             title='New TITLE OF A PR',
             description='New DESC OF A PR',
         )
         response = api_call(self.app, params)

         expected = {
             "msg": "Updated pull request `{}`".format(
                 pull_request.pull_request_id),
             "pull_request": response.json['result']['pull_request'],
             "updated_commits": {"added": [], "common": [], "removed": []},
             "updated_reviewers": {"added": [], "removed": []},
         }

         response_json = response.json['result']
         assert response_json == expected
         pr = response_json['pull_request']
         assert pr['title'] == 'New TITLE OF A PR'
         assert pr['description'] == 'New DESC OF A PR'

     @pytest.mark.backends("git", "hg")
     def test_api_try_update_closed_pull_request(
-            self, pr_util, silence_action_logger, no_notifications):
+            self, pr_util, no_notifications):
         pull_request = pr_util.create_pull_request()
         PullRequestModel().close_pull_request(
             pull_request, TEST_USER_ADMIN_LOGIN)

         id_, params = build_data(
             self.apikey, 'update_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id)
         response = api_call(self.app, params)

         expected = 'pull request `{}` update failed, pull request ' \
                    'is closed'.format(pull_request.pull_request_id)

         assert_error(id_, expected, response.body)

     @pytest.mark.backends("git", "hg")
-    def test_api_update_update_commits(
-            self, pr_util, silence_action_logger, no_notifications):
+    def test_api_update_update_commits(self, pr_util, no_notifications):
         commits = [
             {'message': 'a'},
             {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
             {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
         ]
         pull_request = pr_util.create_pull_request(
             commits=commits, target_head='a', source_head='b', revisions=['b'])
         pr_util.update_source_repository(head='c')
         repo = pull_request.source_repo.scm_instance()
         commits = [x for x in repo.get_commits()]
         print commits

         added_commit_id = commits[-1].raw_id  # c commit
         common_commit_id = commits[1].raw_id  # b commit is common ancestor
         total_commits = [added_commit_id, common_commit_id]

         id_, params = build_data(
             self.apikey, 'update_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             update_commits=True
         )
         response = api_call(self.app, params)

         expected = {
             "msg": "Updated pull request `{}`".format(
                 pull_request.pull_request_id),
             "pull_request": response.json['result']['pull_request'],
             "updated_commits": {"added": [added_commit_id],
                                 "common": [common_commit_id],
                                 "total": total_commits,
                                 "removed": []},
             "updated_reviewers": {"added": [], "removed": []},
         }

         assert_ok(id_, expected, response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_update_change_reviewers(
-            self, user_util, pr_util, silence_action_logger, no_notifications):
+            self, user_util, pr_util, no_notifications):
         a = user_util.create_user()
         b = user_util.create_user()
         c = user_util.create_user()
         new_reviewers = [
             {'username': b.username,'reasons': ['updated via API'],
              'mandatory':False},
             {'username': c.username, 'reasons': ['updated via API'],
              'mandatory':False},
         ]

         added = [b.username, c.username]
         removed = [a.username]

         pull_request = pr_util.create_pull_request(
             reviewers=[(a.username, ['added via API'], False)])

         id_, params = build_data(
             self.apikey, 'update_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             reviewers=new_reviewers)
         response = api_call(self.app, params)
         expected = {
             "msg": "Updated pull request `{}`".format(
                 pull_request.pull_request_id),
             "pull_request": response.json['result']['pull_request'],
             "updated_commits": {"added": [], "common": [], "removed": []},
             "updated_reviewers": {"added": added, "removed": removed},
         }

         assert_ok(id_, expected, response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_update_bad_user_in_reviewers(self, pr_util):
         pull_request = pr_util.create_pull_request()

         id_, params = build_data(
             self.apikey, 'update_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,
             reviewers=[{'username': 'bad_name'}])
         response = api_call(self.app, params)

         expected = 'user `bad_name` does not exist'

         assert_error(id_, expected, response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_update_repo_error(self, pr_util):
         id_, params = build_data(
             self.apikey, 'update_pull_request',
             repoid='fake',
             pullrequestid='fake',
             reviewers=[{'username': 'bad_name'}])
         response = api_call(self.app, params)

         expected = 'repository `fake` does not exist'

         response_json = response.json['error']
         assert response_json == expected

     @pytest.mark.backends("git", "hg")
     def test_api_update_pull_request_error(self, pr_util):
         pull_request = pr_util.create_pull_request()

         id_, params = build_data(
             self.apikey, 'update_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=999999,
             reviewers=[{'username': 'bad_name'}])
         response = api_call(self.app, params)

         expected = 'pull request `999999` does not exist'
         assert_error(id_, expected, response.body)

     @pytest.mark.backends("git", "hg")
     def test_api_update_pull_request_no_perms_to_update(
             self, user_util, pr_util):
         user = user_util.create_user()
         pull_request = pr_util.create_pull_request()

         id_, params = build_data(
             user.api_key, 'update_pull_request',
             repoid=pull_request.target_repo.repo_name,
             pullrequestid=pull_request.pull_request_id,)
         response = api_call(self.app, params)

         expected = ('pull request `%s` update failed, '
                     'no permission to update.') % pull_request.pull_request_id

         assert_error(id_, expected, response.body)
@@ -1,779 +1,779 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2011-2017 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/


 import logging

 from rhodecode import events
 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
 from rhodecode.api.utils import (
     has_superadmin_permission, Optional, OAttr, get_repo_or_error,
     get_pull_request_or_error, get_commit_or_error, get_user_or_error,
     validate_repo_permissions, resolve_ref_or_error)
 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
 from rhodecode.lib.base import vcs_operation_context
 from rhodecode.lib.utils2 import str2bool
 from rhodecode.model.changeset_status import ChangesetStatusModel
 from rhodecode.model.comment import CommentsModel
 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment
 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
 from rhodecode.model.settings import SettingsModel
 from rhodecode.model.validation_schema import Invalid
 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
     ReviewerListSchema)

 log = logging.getLogger(__name__)


 @jsonrpc_method()
 def get_pull_request(request, apiuser, repoid, pullrequestid):
     """
     Get a pull request based on the given ID.

     :param apiuser: This is filled automatically from the |authtoken|.
     :type apiuser: AuthUser
     :param repoid: Repository name or repository ID from where the pull
         request was opened.
     :type repoid: str or int
     :param pullrequestid: ID of the requested pull request.
     :type pullrequestid: int

     Example output:

     .. code-block:: bash

         "id": <id_given_in_input>,
         "result":
         {
             "pull_request_id": "<pull_request_id>",
             "url": "<url>",
             "title": "<title>",
             "description": "<description>",
             "status" : "<status>",
             "created_on": "<date_time_created>",
             "updated_on": "<date_time_updated>",
             "commit_ids": [
                 ...
                 "<commit_id>",
                 "<commit_id>",
                 ...
             ],
             "review_status": "<review_status>",
             "mergeable": {
                 "status": "<bool>",
                 "message": "<message>",
             },
             "source": {
                 "clone_url": "<clone_url>",
                 "repository": "<repository_name>",
                 "reference":
                 {
                     "name": "<name>",
                     "type": "<type>",
                     "commit_id": "<commit_id>",
                 }
             },
             "target": {
                 "clone_url": "<clone_url>",
                 "repository": "<repository_name>",
                 "reference":
                 {
                     "name": "<name>",
                     "type": "<type>",
                     "commit_id": "<commit_id>",
                 }
             },
             "merge": {
                 "clone_url": "<clone_url>",
                 "reference":
                 {
                     "name": "<name>",
                     "type": "<type>",
                     "commit_id": "<commit_id>",
                 }
             },
             "author": <user_obj>,
             "reviewers": [
                 ...
                 {
                     "user": "<user_obj>",
                     "review_status": "<review_status>",
                 }
                 ...
             ]
         },
         "error": null
     """
     get_repo_or_error(repoid)
     pull_request = get_pull_request_or_error(pullrequestid)
     if not PullRequestModel().check_user_read(
             pull_request, apiuser, api=True):
         raise JSONRPCError('repository `%s` does not exist' % (repoid,))
     data = pull_request.get_api_data()
     return data
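Note: a short sketch of calling get_pull_request through the same helpers the test suites above use; the repository name and id are placeholders, and apikey/app are assumed bound as in the tests:

    id_, params = build_data(
        apikey, 'get_pull_request',
        repoid='my-repo', pullrequestid=1)  # placeholders
    response = api_call(app, params)
    # the result carries the full structure documented in the docstring
    print response.json['result']['review_status']
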


 @jsonrpc_method()
 def get_pull_requests(request, apiuser, repoid, status=Optional('new')):
     """
     Get all pull requests from the repository specified in `repoid`.

     :param apiuser: This is filled automatically from the |authtoken|.
     :type apiuser: AuthUser
     :param repoid: Repository name or repository ID.
     :type repoid: str or int
     :param status: Only return pull requests with the specified status.
         Valid options are.
         * ``new`` (default)
         * ``open``
         * ``closed``
     :type status: str

     Example output:

     .. code-block:: bash

         "id": <id_given_in_input>,
         "result":
         [
             ...
             {
                 "pull_request_id": "<pull_request_id>",
                 "url": "<url>",
                 "title" : "<title>",
                 "description": "<description>",
                 "status": "<status>",
                 "created_on": "<date_time_created>",
                 "updated_on": "<date_time_updated>",
                 "commit_ids": [
                     ...
                     "<commit_id>",
                     "<commit_id>",
                     ...
                 ],
                 "review_status": "<review_status>",
                 "mergeable": {
                     "status": "<bool>",
                     "message: "<message>",
                 },
                 "source": {
                     "clone_url": "<clone_url>",
                     "reference":
                     {
                         "name": "<name>",
                         "type": "<type>",
                         "commit_id": "<commit_id>",
                     }
                 },
                 "target": {
                     "clone_url": "<clone_url>",
                     "reference":
                     {
                         "name": "<name>",
                         "type": "<type>",
                         "commit_id": "<commit_id>",
                     }
                 },
                 "merge": {
                     "clone_url": "<clone_url>",
                     "reference":
                     {
                         "name": "<name>",
                         "type": "<type>",
                         "commit_id": "<commit_id>",
                     }
                 },
                 "author": <user_obj>,
                 "reviewers": [
                     ...
                     {
                         "user": "<user_obj>",
                         "review_status": "<review_status>",
                     }
                     ...
                 ]
             }
             ...
         ],
         "error": null

     """
     repo = get_repo_or_error(repoid)
     if not has_superadmin_permission(apiuser):
         _perms = (
             'repository.admin', 'repository.write', 'repository.read',)
         validate_repo_permissions(apiuser, repoid, repo, _perms)

     status = Optional.extract(status)
     pull_requests = PullRequestModel().get_all(repo, statuses=[status])
     data = [pr.get_api_data() for pr in pull_requests]
     return data
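Note: get_pull_requests filters on a single status per call, so listing another status means another call. A sketch with a placeholder repository name, reusing the test helpers:

    id_, params = build_data(
        apikey, 'get_pull_requests',
        repoid='my-repo', status='open')  # placeholder repo
    response = api_call(app, params)
    for pr in response.json['result']:
        print pr['pull_request_id'], pr['title']
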


 @jsonrpc_method()
 def merge_pull_request(
         request, apiuser, repoid, pullrequestid,
         userid=Optional(OAttr('apiuser'))):
     """
     Merge the pull request specified by `pullrequestid` into its target
     repository.

     :param apiuser: This is filled automatically from the |authtoken|.
     :type apiuser: AuthUser
     :param repoid: The Repository name or repository ID of the
         target repository to which the |pr| is to be merged.
     :type repoid: str or int
     :param pullrequestid: ID of the pull request which shall be merged.
     :type pullrequestid: int
     :param userid: Merge the pull request as this user.
     :type userid: Optional(str or int)

     Example output:

     .. code-block:: bash

         "id": <id_given_in_input>,
         "result": {
             "executed": "<bool>",
             "failure_reason": "<int>",
             "merge_commit_id": "<merge_commit_id>",
             "possible": "<bool>",
             "merge_ref": {
                 "commit_id": "<commit_id>",
                 "type": "<type>",
                 "name": "<name>"
             }
         },
         "error": null
     """
     repo = get_repo_or_error(repoid)
     if not isinstance(userid, Optional):
         if (has_superadmin_permission(apiuser) or
                 HasRepoPermissionAnyApi('repository.admin')(
                     user=apiuser, repo_name=repo.repo_name)):
             apiuser = get_user_or_error(userid)
         else:
             raise JSONRPCError('userid is not the same as your user')

     pull_request = get_pull_request_or_error(pullrequestid)

     check = MergeCheck.validate(pull_request, user=apiuser)
     merge_possible = not check.failed

     if not merge_possible:
         error_messages = []
         for err_type, error_msg in check.errors:
             error_msg = request.translate(error_msg)
             error_messages.append(error_msg)

         reasons = ','.join(error_messages)
         raise JSONRPCError(
             'merge not possible for following reasons: {}'.format(reasons))

     target_repo = pull_request.target_repo
     extras = vcs_operation_context(
         request.environ, repo_name=target_repo.repo_name,
         username=apiuser.username, action='push',
         scm=target_repo.repo_type)
     merge_response = PullRequestModel().merge(
         pull_request, apiuser, extras=extras)
     if merge_response.executed:
         PullRequestModel().close_pull_request(
             pull_request.pull_request_id, apiuser)

         Session().commit()

     # In previous versions the merge response directly contained the merge
     # commit id. It is now contained in the merge reference object. To be
     # backwards compatible we have to extract it again.
     merge_response = merge_response._asdict()
     merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id

     return merge_response
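Note: as the trailing comment in merge_pull_request explains, 'merge_commit_id' is copied out of the merge reference purely for backwards compatibility, so both fields agree in a successful response:

    result = response.json['result']  # response from a successful merge call
    # either field can be used to locate the merge commit
    assert result['merge_commit_id'] == result['merge_ref']['commit_id']
    assert result['executed'] is True
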
310 | 310 | |
|
311 | 311 | |
|
312 | 312 | @jsonrpc_method() |
|
313 | 313 | def comment_pull_request( |
|
314 | 314 | request, apiuser, repoid, pullrequestid, message=Optional(None), |
|
315 | 315 | commit_id=Optional(None), status=Optional(None), |
|
316 | 316 | comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE), |
|
317 | 317 | resolves_comment_id=Optional(None), |
|
318 | 318 | userid=Optional(OAttr('apiuser'))): |
|
319 | 319 | """ |
|
320 | 320 | Comment on the pull request specified with the `pullrequestid`, |
|
321 | 321 | in the |repo| specified by the `repoid`, and optionally change the |
|
322 | 322 | review status. |
|
323 | 323 | |
|
324 | 324 | :param apiuser: This is filled automatically from the |authtoken|. |
|
325 | 325 | :type apiuser: AuthUser |
|
326 | 326 | :param repoid: The repository name or repository ID. |
|
327 | 327 | :type repoid: str or int |
|
328 | 328 | :param pullrequestid: The pull request ID. |
|
329 | 329 | :type pullrequestid: int |
|
330 | 330 | :param commit_id: Specify the commit_id for which to set a comment. If |
|
331 | 331 | given commit_id is different than latest in the PR status |
|
332 | 332 | change won't be performed. |
|
333 | 333 | :type commit_id: str |
|
334 | 334 | :param message: The text content of the comment. |
|
335 | 335 | :type message: str |
|
336 | 336 | :param status: (**Optional**) Set the approval status of the pull |
|
337 | 337 | request. One of: 'not_reviewed', 'approved', 'rejected', |
|
338 | 338 | 'under_review' |
|
339 | 339 | :type status: str |
|
340 | 340 | :param comment_type: Comment type, one of: 'note', 'todo' |
|
341 | 341 | :type comment_type: Optional(str), default: 'note' |
|
342 | 342 | :param userid: Comment on the pull request as this user |
|
343 | 343 | :type userid: Optional(str or int) |
|
344 | 344 | |
|
345 | 345 | Example output: |
|
346 | 346 | |
|
347 | 347 | .. code-block:: bash |
|
348 | 348 | |
|
349 | 349 | id : <id_given_in_input> |
|
350 | 350 | result : { |
|
351 | 351 | "pull_request_id": "<Integer>", |
|
352 | 352 | "comment_id": "<Integer>", |
|
353 | 353 | "status": {"given": <given_status>, |
|
354 | 354 | "was_changed": <bool status_was_actually_changed> }, |
|
355 | 355 | }, |
|
356 | 356 | error : null |
|
357 | 357 | """ |
|
358 | 358 | repo = get_repo_or_error(repoid) |
|
359 | 359 | if not isinstance(userid, Optional): |
|
360 | 360 | if (has_superadmin_permission(apiuser) or |
|
361 | 361 | HasRepoPermissionAnyApi('repository.admin')( |
|
362 | 362 | user=apiuser, repo_name=repo.repo_name)): |
|
363 | 363 | apiuser = get_user_or_error(userid) |
|
364 | 364 | else: |
|
365 | 365 | raise JSONRPCError('userid is not the same as your user') |
|
366 | 366 | |
|
367 | 367 | pull_request = get_pull_request_or_error(pullrequestid) |
|
368 | 368 | if not PullRequestModel().check_user_read( |
|
369 | 369 | pull_request, apiuser, api=True): |
|
370 | 370 | raise JSONRPCError('repository `%s` does not exist' % (repoid,)) |
|
371 | 371 | message = Optional.extract(message) |
|
372 | 372 | status = Optional.extract(status) |
|
373 | 373 | commit_id = Optional.extract(commit_id) |
|
374 | 374 | comment_type = Optional.extract(comment_type) |
|
375 | 375 | resolves_comment_id = Optional.extract(resolves_comment_id) |
|
376 | 376 | |
|
377 | 377 | if not message and not status: |
|
378 | 378 | raise JSONRPCError( |
|
379 | 379 | 'Both message and status parameters are missing. ' |
|
380 | 380 | 'At least one is required.') |
|
381 | 381 | |
|
382 | 382 | if (status not in (st[0] for st in ChangesetStatus.STATUSES) and |
|
383 | 383 | status is not None): |
|
384 | 384 | raise JSONRPCError('Unknown comment status: `%s`' % status) |
|
385 | 385 | |
|
386 | 386 | if commit_id and commit_id not in pull_request.revisions: |
|
387 | 387 | raise JSONRPCError( |
|
388 | 388 | 'Invalid commit_id `%s` for this pull request.' % commit_id) |
|
389 | 389 | |
|
390 | 390 | allowed_to_change_status = PullRequestModel().check_user_change_status( |
|
391 | 391 | pull_request, apiuser) |
|
392 | 392 | |
|
393 | 393 | # if commit_id is passed re-validated if user is allowed to change status |
|
394 | 394 | # based on latest commit_id from the PR |
|
395 | 395 | if commit_id: |
|
396 | 396 | commit_idx = pull_request.revisions.index(commit_id) |
|
397 | 397 | if commit_idx != 0: |
|
398 | 398 | allowed_to_change_status = False |
|
399 | 399 | |
|
400 | 400 | if resolves_comment_id: |
|
401 | 401 | comment = ChangesetComment.get(resolves_comment_id) |
|
402 | 402 | if not comment: |
|
403 | 403 | raise JSONRPCError( |
|
404 | 404 | 'Invalid resolves_comment_id `%s` for this pull request.' |
|
405 | 405 | % resolves_comment_id) |
|
406 | 406 | if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO: |
|
407 | 407 | raise JSONRPCError( |
|
408 | 408 | 'Comment `%s` is the wrong type for setting status to resolved.'
|
409 | 409 | % resolves_comment_id) |
|
410 | 410 | |
|
411 | 411 | text = message |
|
412 | 412 | status_label = ChangesetStatus.get_status_lbl(status) |
|
413 | 413 | if status and allowed_to_change_status: |
|
414 | 414 | st_message = ('Status change %(transition_icon)s %(status)s' |
|
415 | 415 | % {'transition_icon': '>', 'status': status_label}) |
|
416 | 416 | text = message or st_message |
|
417 | 417 | |
|
418 | 418 | rc_config = SettingsModel().get_all_settings() |
|
419 | 419 | renderer = rc_config.get('rhodecode_markup_renderer', 'rst') |
|
420 | 420 | |
|
421 | 421 | status_change = status and allowed_to_change_status |
|
422 | 422 | comment = CommentsModel().create( |
|
423 | 423 | text=text, |
|
424 | 424 | repo=pull_request.target_repo.repo_id, |
|
425 | 425 | user=apiuser.user_id, |
|
426 | 426 | pull_request=pull_request.pull_request_id, |
|
427 | 427 | f_path=None, |
|
428 | 428 | line_no=None, |
|
429 | 429 | status_change=(status_label if status_change else None), |
|
430 | 430 | status_change_type=(status if status_change else None), |
|
431 | 431 | closing_pr=False, |
|
432 | 432 | renderer=renderer, |
|
433 | 433 | comment_type=comment_type, |
|
434 | 434 | resolves_comment_id=resolves_comment_id |
|
435 | 435 | ) |
|
436 | 436 | |
|
437 | 437 | if allowed_to_change_status and status: |
|
438 | 438 | ChangesetStatusModel().set_status( |
|
439 | 439 | pull_request.target_repo.repo_id, |
|
440 | 440 | status, |
|
441 | 441 | apiuser.user_id, |
|
442 | 442 | comment, |
|
443 | 443 | pull_request=pull_request.pull_request_id |
|
444 | 444 | ) |
|
445 | 445 | Session().flush() |
|
446 | 446 | |
|
447 | 447 | Session().commit() |
|
448 | 448 | data = { |
|
449 | 449 | 'pull_request_id': pull_request.pull_request_id, |
|
450 | 450 | 'comment_id': comment.comment_id if comment else None, |
|
451 | 451 | 'status': {'given': status, 'was_changed': status_change}, |
|
452 | 452 | } |
|
453 | 453 | return data |
|
454 | 454 | |
|
455 | 455 | |
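A minimal sketch of invoking ``comment_pull_request`` through the JSON-RPC endpoint with the ``requests`` library; the endpoint URL, token, repository name and pull request id are placeholders, not values from this codebase:

.. code-block:: python

    import requests

    # Hypothetical endpoint and auth token; substitute your own instance.
    api_url = 'https://rhodecode.example.com/_admin/api'
    payload = {
        'id': 1,
        'auth_token': 'SECRET_TOKEN',
        'method': 'comment_pull_request',
        'args': {
            'repoid': 'my-repo',
            'pullrequestid': 42,
            'message': 'Looks good to me',
            'status': 'approved',
        },
    }
    # The server replies with {"id": 1, "result": {...}, "error": null}.
    print(requests.post(api_url, json=payload).json()['result'])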
|
456 | 456 | @jsonrpc_method() |
|
457 | 457 | def create_pull_request( |
|
458 | 458 | request, apiuser, source_repo, target_repo, source_ref, target_ref, |
|
459 | 459 | title, description=Optional(''), reviewers=Optional(None)): |
|
460 | 460 | """ |
|
461 | 461 | Creates a new pull request. |
|
462 | 462 | |
|
463 | 463 | Accepts refs in the following formats: |
|
464 | 464 | |
|
465 | 465 | * branch:<branch_name>:<sha> |
|
466 | 466 | * branch:<branch_name> |
|
467 | 467 | * bookmark:<bookmark_name>:<sha> (Mercurial only) |
|
468 | 468 | * bookmark:<bookmark_name> (Mercurial only) |
|
469 | 469 | |
|
470 | 470 | :param apiuser: This is filled automatically from the |authtoken|. |
|
471 | 471 | :type apiuser: AuthUser |
|
472 | 472 | :param source_repo: Set the source repository name. |
|
473 | 473 | :type source_repo: str |
|
474 | 474 | :param target_repo: Set the target repository name. |
|
475 | 475 | :type target_repo: str |
|
476 | 476 | :param source_ref: Set the source ref name. |
|
477 | 477 | :type source_ref: str |
|
478 | 478 | :param target_ref: Set the target ref name. |
|
479 | 479 | :type target_ref: str |
|
480 | 480 | :param title: Set the pull request title. |
|
481 | 481 | :type title: str |
|
482 | 482 | :param description: Set the pull request description. |
|
483 | 483 | :type description: Optional(str) |
|
484 | 484 | :param reviewers: Set the new pull request reviewers list. |
|
485 | 485 | Reviewers defined by review rules will be added automatically to the
|
486 | 486 | defined list. |
|
487 | 487 | :type reviewers: Optional(list) |
|
488 | 488 | Accepts username strings or objects of the format: |
|
489 | 489 | |
|
490 | 490 | [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}] |
|
491 | 491 | """ |
|
492 | 492 | |
|
493 | 493 | source_db_repo = get_repo_or_error(source_repo) |
|
494 | 494 | target_db_repo = get_repo_or_error(target_repo) |
|
495 | 495 | if not has_superadmin_permission(apiuser): |
|
496 | 496 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
497 | 497 | validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms) |
|
498 | 498 | |
|
499 | 499 | full_source_ref = resolve_ref_or_error(source_ref, source_db_repo) |
|
500 | 500 | full_target_ref = resolve_ref_or_error(target_ref, target_db_repo) |
|
501 | 501 | source_commit = get_commit_or_error(full_source_ref, source_db_repo) |
|
502 | 502 | target_commit = get_commit_or_error(full_target_ref, target_db_repo) |
|
503 | 503 | source_scm = source_db_repo.scm_instance() |
|
504 | 504 | target_scm = target_db_repo.scm_instance() |
|
505 | 505 | |
|
506 | 506 | commit_ranges = target_scm.compare( |
|
507 | 507 | target_commit.raw_id, source_commit.raw_id, source_scm, |
|
508 | 508 | merge=True, pre_load=[]) |
|
509 | 509 | |
|
510 | 510 | ancestor = target_scm.get_common_ancestor( |
|
511 | 511 | target_commit.raw_id, source_commit.raw_id, source_scm) |
|
512 | 512 | |
|
513 | 513 | if not commit_ranges: |
|
514 | 514 | raise JSONRPCError('no commits found') |
|
515 | 515 | |
|
516 | 516 | if not ancestor: |
|
517 | 517 | raise JSONRPCError('no common ancestor found') |
|
518 | 518 | |
|
519 | 519 | reviewer_objects = Optional.extract(reviewers) or [] |
|
520 | 520 | |
|
521 | 521 | if reviewer_objects: |
|
522 | 522 | schema = ReviewerListSchema() |
|
523 | 523 | try: |
|
524 | 524 | reviewer_objects = schema.deserialize(reviewer_objects) |
|
525 | 525 | except Invalid as err: |
|
526 | 526 | raise JSONRPCValidationError(colander_exc=err) |
|
527 | 527 | |
|
528 | 528 | # validate users |
|
529 | 529 | for reviewer_object in reviewer_objects: |
|
530 | 530 | user = get_user_or_error(reviewer_object['username']) |
|
531 | 531 | reviewer_object['user_id'] = user.user_id |
|
532 | 532 | |
|
533 | 533 | get_default_reviewers_data, get_validated_reviewers = \ |
|
534 | 534 | PullRequestModel().get_reviewer_functions() |
|
535 | 535 | |
|
536 | 536 | reviewer_rules = get_default_reviewers_data( |
|
537 | 537 | apiuser.get_instance(), source_db_repo, |
|
538 | 538 | source_commit, target_db_repo, target_commit) |
|
539 | 539 | |
|
540 | 540 | # the specified rules are re-validated later, so we can assume users will

541 | 541 | # eventually provide reviewers that meet the criteria.
|
542 | 542 | if not reviewer_objects: |
|
543 | 543 | reviewer_objects = reviewer_rules['reviewers'] |
|
544 | 544 | |
|
545 | 545 | try: |
|
546 | 546 | reviewers = get_validated_reviewers( |
|
547 | 547 | reviewer_objects, reviewer_rules) |
|
548 | 548 | except ValueError as e: |
|
549 | 549 | raise JSONRPCError('Reviewers Validation: {}'.format(e)) |
|
550 | 550 | |
|
551 | 551 | pull_request_model = PullRequestModel() |
|
552 | 552 | pull_request = pull_request_model.create( |
|
553 | 553 | created_by=apiuser.user_id, |
|
554 | 554 | source_repo=source_repo, |
|
555 | 555 | source_ref=full_source_ref, |
|
556 | 556 | target_repo=target_repo, |
|
557 | 557 | target_ref=full_target_ref, |
|
558 | 558 | revisions=reversed( |
|
559 | 559 | [commit.raw_id for commit in reversed(commit_ranges)]), |
|
560 | 560 | reviewers=reviewers, |
|
561 | 561 | title=title, |
|
562 | 562 | description=Optional.extract(description) |
|
563 | 563 | ) |
|
564 | 564 | |
|
565 | 565 | Session().commit() |
|
566 | 566 | data = { |
|
567 | 567 | 'msg': 'Created new pull request `{}`'.format(title), |
|
568 | 568 | 'pull_request_id': pull_request.pull_request_id, |
|
569 | 569 | } |
|
570 | 570 | return data |
|
571 | 571 | |
|
572 | 572 | |
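For illustration, a hedged sketch of the ``args`` payload for ``create_pull_request``, using the ref formats listed above; repository, branch and reviewer names are hypothetical:

.. code-block:: python

    # Refs follow the accepted branch:<branch_name>[:<sha>] format above.
    args = {
        'source_repo': 'my-repo-fork',
        'target_repo': 'my-repo',
        'source_ref': 'branch:feature-x',
        'target_ref': 'branch:default',
        'title': 'Add feature X',
        'description': 'Implements feature X.',
        'reviewers': [
            {'username': 'nick',
             'reasons': ['original author'],
             'mandatory': True},
        ],
    }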
|
573 | 573 | @jsonrpc_method() |
|
574 | 574 | def update_pull_request( |
|
575 | 575 | request, apiuser, repoid, pullrequestid, title=Optional(''), |
|
576 | 576 | description=Optional(''), reviewers=Optional(None), |
|
577 | 577 | update_commits=Optional(None)): |
|
578 | 578 | """ |
|
579 | 579 | Updates a pull request. |
|
580 | 580 | |
|
581 | 581 | :param apiuser: This is filled automatically from the |authtoken|. |
|
582 | 582 | :type apiuser: AuthUser |
|
583 | 583 | :param repoid: The repository name or repository ID. |
|
584 | 584 | :type repoid: str or int |
|
585 | 585 | :param pullrequestid: The pull request ID. |
|
586 | 586 | :type pullrequestid: int |
|
587 | 587 | :param title: Set the pull request title. |
|
588 | 588 | :type title: str |
|
589 | 589 | :param description: Update pull request description. |
|
590 | 590 | :type description: Optional(str) |
|
591 | 591 | :param reviewers: Update pull request reviewers list with new value. |
|
592 | 592 | :type reviewers: Optional(list) |
|
593 | 593 | Accepts username strings or objects of the format: |
|
594 | 594 | |
|
595 | 595 | [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}] |
|
596 | 596 | |
|
597 | 597 | :param update_commits: Trigger update of commits for this pull request |
|
598 | 598 | :type update_commits: Optional(bool)
|
599 | 599 | |
|
600 | 600 | Example output: |
|
601 | 601 | |
|
602 | 602 | .. code-block:: bash |
|
603 | 603 | |
|
604 | 604 | id : <id_given_in_input> |
|
605 | 605 | result : { |
|
606 | 606 | "msg": "Updated pull request `63`", |
|
607 | 607 | "pull_request": <pull_request_object>, |
|
608 | 608 | "updated_reviewers": { |
|
609 | 609 | "added": [ |
|
610 | 610 | "username" |
|
611 | 611 | ], |
|
612 | 612 | "removed": [] |
|
613 | 613 | }, |
|
614 | 614 | "updated_commits": { |
|
615 | 615 | "added": [ |
|
616 | 616 | "<sha1_hash>" |
|
617 | 617 | ], |
|
618 | 618 | "common": [ |
|
619 | 619 | "<sha1_hash>", |
|
620 | 620 | "<sha1_hash>", |
|
621 | 621 | ], |
|
622 | 622 | "removed": [] |
|
623 | 623 | } |
|
624 | 624 | } |
|
625 | 625 | error : null |
|
626 | 626 | """ |
|
627 | 627 | |
|
628 | 628 | repo = get_repo_or_error(repoid) |
|
629 | 629 | pull_request = get_pull_request_or_error(pullrequestid) |
|
630 | 630 | if not PullRequestModel().check_user_update( |
|
631 | 631 | pull_request, apiuser, api=True): |
|
632 | 632 | raise JSONRPCError( |
|
633 | 633 | 'pull request `%s` update failed, no permission to update.' % ( |
|
634 | 634 | pullrequestid,)) |
|
635 | 635 | if pull_request.is_closed(): |
|
636 | 636 | raise JSONRPCError( |
|
637 | 637 | 'pull request `%s` update failed, pull request is closed' % ( |
|
638 | 638 | pullrequestid,)) |
|
639 | 639 | |
|
640 | 640 | reviewer_objects = Optional.extract(reviewers) or [] |
|
641 | 641 | |
|
642 | 642 | if reviewer_objects: |
|
643 | 643 | schema = ReviewerListSchema() |
|
644 | 644 | try: |
|
645 | 645 | reviewer_objects = schema.deserialize(reviewer_objects) |
|
646 | 646 | except Invalid as err: |
|
647 | 647 | raise JSONRPCValidationError(colander_exc=err) |
|
648 | 648 | |
|
649 | 649 | # validate users |
|
650 | 650 | for reviewer_object in reviewer_objects: |
|
651 | 651 | user = get_user_or_error(reviewer_object['username']) |
|
652 | 652 | reviewer_object['user_id'] = user.user_id |
|
653 | 653 | |
|
654 | 654 | get_default_reviewers_data, get_validated_reviewers = \ |
|
655 | 655 | PullRequestModel().get_reviewer_functions() |
|
656 | 656 | |
|
657 | 657 | # re-use stored rules |
|
658 | 658 | reviewer_rules = pull_request.reviewer_data |
|
659 | 659 | try: |
|
660 | 660 | reviewers = get_validated_reviewers( |
|
661 | 661 | reviewer_objects, reviewer_rules) |
|
662 | 662 | except ValueError as e: |
|
663 | 663 | raise JSONRPCError('Reviewers Validation: {}'.format(e)) |
|
664 | 664 | else: |
|
665 | 665 | reviewers = [] |
|
666 | 666 | |
|
667 | 667 | title = Optional.extract(title) |
|
668 | 668 | description = Optional.extract(description) |
|
669 | 669 | if title or description: |
|
670 | 670 | PullRequestModel().edit( |
|
671 | 671 | pull_request, title or pull_request.title, |
|
672 | description or pull_request.description) | |
|
672 | description or pull_request.description, apiuser) | |
|
673 | 673 | Session().commit() |
|
674 | 674 | |
|
675 | 675 | commit_changes = {"added": [], "common": [], "removed": []} |
|
676 | 676 | if str2bool(Optional.extract(update_commits)): |
|
677 | 677 | if PullRequestModel().has_valid_update_type(pull_request): |
|
678 | 678 | update_response = PullRequestModel().update_commits( |
|
679 | 679 | pull_request) |
|
680 | 680 | commit_changes = update_response.changes or commit_changes |
|
681 | 681 | Session().commit() |
|
682 | 682 | |
|
683 | 683 | reviewers_changes = {"added": [], "removed": []} |
|
684 | 684 | if reviewers: |
|
685 | 685 | added_reviewers, removed_reviewers = \ |
|
686 | PullRequestModel().update_reviewers(pull_request, reviewers) | |
|
686 | PullRequestModel().update_reviewers(pull_request, reviewers, apiuser) | |
|
687 | 687 | |
|
688 | 688 | reviewers_changes['added'] = sorted( |
|
689 | 689 | [get_user_or_error(n).username for n in added_reviewers]) |
|
690 | 690 | reviewers_changes['removed'] = sorted( |
|
691 | 691 | [get_user_or_error(n).username for n in removed_reviewers]) |
|
692 | 692 | Session().commit() |
|
693 | 693 | |
|
694 | 694 | data = { |
|
695 | 695 | 'msg': 'Updated pull request `{}`'.format( |
|
696 | 696 | pull_request.pull_request_id), |
|
697 | 697 | 'pull_request': pull_request.get_api_data(), |
|
698 | 698 | 'updated_commits': commit_changes, |
|
699 | 699 | 'updated_reviewers': reviewers_changes |
|
700 | 700 | } |
|
701 | 701 | |
|
702 | 702 | return data |
|
703 | 703 | |
|
704 | 704 | |
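As a sketch, an ``args`` payload that retitles a pull request, replaces the reviewer list and triggers a commit update in one call; all values are hypothetical:

.. code-block:: python

    args = {
        'repoid': 'my-repo',            # hypothetical repository
        'pullrequestid': 63,
        'title': 'Add feature X (v2)',
        'reviewers': ['nick', 'jane'],  # plain usernames are accepted too
        'update_commits': True,         # pull in new commits from the source ref
    }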
|
705 | 705 | @jsonrpc_method() |
|
706 | 706 | def close_pull_request( |
|
707 | 707 | request, apiuser, repoid, pullrequestid, |
|
708 | 708 | userid=Optional(OAttr('apiuser')), message=Optional('')): |
|
709 | 709 | """ |
|
710 | 710 | Close the pull request specified by `pullrequestid`. |
|
711 | 711 | |
|
712 | 712 | :param apiuser: This is filled automatically from the |authtoken|. |
|
713 | 713 | :type apiuser: AuthUser |
|
714 | 714 | :param repoid: Repository name or repository ID to which the pull |
|
715 | 715 | request belongs. |
|
716 | 716 | :type repoid: str or int |
|
717 | 717 | :param pullrequestid: ID of the pull request to be closed. |
|
718 | 718 | :type pullrequestid: int |
|
719 | 719 | :param userid: Close the pull request as this user. |
|
720 | 720 | :type userid: Optional(str or int) |
|
721 | 721 | :param message: Optional message to close the Pull Request with. If not |
|
722 | 722 | specified it will be generated automatically. |
|
723 | 723 | :type message: Optional(str) |
|
724 | 724 | |
|
725 | 725 | Example output: |
|
726 | 726 | |
|
727 | 727 | .. code-block:: bash |
|
728 | 728 | |
|
729 | 729 | "id": <id_given_in_input>, |
|
730 | 730 | "result": { |
|
731 | 731 | "pull_request_id": "<int>", |
|
732 | 732 | "close_status": "<str:status_lbl>",
|
733 | 733 | "closed": "<bool>" |
|
734 | 734 | }, |
|
735 | 735 | "error": null |
|
736 | 736 | |
|
737 | 737 | """ |
|
738 | 738 | _ = request.translate |
|
739 | 739 | |
|
740 | 740 | repo = get_repo_or_error(repoid) |
|
741 | 741 | if not isinstance(userid, Optional): |
|
742 | 742 | if (has_superadmin_permission(apiuser) or |
|
743 | 743 | HasRepoPermissionAnyApi('repository.admin')( |
|
744 | 744 | user=apiuser, repo_name=repo.repo_name)): |
|
745 | 745 | apiuser = get_user_or_error(userid) |
|
746 | 746 | else: |
|
747 | 747 | raise JSONRPCError('userid is not the same as your user') |
|
748 | 748 | |
|
749 | 749 | pull_request = get_pull_request_or_error(pullrequestid) |
|
750 | 750 | |
|
751 | 751 | if pull_request.is_closed(): |
|
752 | 752 | raise JSONRPCError( |
|
753 | 753 | 'pull request `%s` is already closed' % (pullrequestid,)) |
|
754 | 754 | |
|
755 | 755 | # only the owner, an admin, or a user with write permissions
|
756 | 756 | allowed_to_close = PullRequestModel().check_user_update( |
|
757 | 757 | pull_request, apiuser, api=True) |
|
758 | 758 | |
|
759 | 759 | if not allowed_to_close: |
|
760 | 760 | raise JSONRPCError( |
|
761 | 761 | 'pull request `%s` close failed, no permission to close.' % ( |
|
762 | 762 | pullrequestid,)) |
|
763 | 763 | |
|
764 | 764 | # the message used to close the PR; if empty, one is generated automatically
|
765 | 765 | message = Optional.extract(message) |
|
766 | 766 | |
|
767 | 767 | # finally close the PR, attaching the message as a comment
|
768 | 768 | comment, status = PullRequestModel().close_pull_request_with_comment( |
|
769 | 769 | pull_request, apiuser, repo, message=message) |
|
770 | 770 | status_lbl = ChangesetStatus.get_status_lbl(status) |
|
771 | 771 | |
|
772 | 772 | Session().commit() |
|
773 | 773 | |
|
774 | 774 | data = { |
|
775 | 775 | 'pull_request_id': pull_request.pull_request_id, |
|
776 | 776 | 'close_status': status_lbl, |
|
777 | 777 | 'closed': True, |
|
778 | 778 | } |
|
779 | 779 | return data |
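A corresponding sketch for ``close_pull_request``, again with placeholder values; note that ``userid`` is only honoured for super-admins or repository admins, as enforced above:

.. code-block:: python

    args = {
        'repoid': 'my-repo',
        'pullrequestid': 42,
        'message': 'Superseded by a newer pull request.',
        # 'userid': 'other-user',  # admins only, otherwise JSONRPCError
    }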
@@ -1,643 +1,643 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Users CRUD controller for Pylons
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import formencode |
|
27 | 27 | |
|
28 | 28 | from formencode import htmlfill |
|
29 | 29 | from pylons import request, tmpl_context as c, url, config |
|
30 | 30 | from pylons.controllers.util import redirect |
|
31 | 31 | from pylons.i18n.translation import _ |
|
32 | 32 | |
|
33 | 33 | from rhodecode.authentication.plugins import auth_rhodecode |
|
34 | 34 | |
|
35 | 35 | from rhodecode.lib import helpers as h |
|
36 | 36 | from rhodecode.lib import auth |
|
37 | 37 | from rhodecode.lib import audit_logger |
|
38 | 38 | from rhodecode.lib.auth import ( |
|
39 | 39 | LoginRequired, HasPermissionAllDecorator, AuthUser) |
|
40 | 40 | from rhodecode.lib.base import BaseController, render |
|
41 | 41 | from rhodecode.lib.exceptions import ( |
|
42 | 42 | DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException, |
|
43 | 43 | UserOwnsUserGroupsException, UserCreationError) |
|
44 | 44 | from rhodecode.lib.utils2 import safe_int, AttributeDict |
|
45 | 45 | |
|
46 | 46 | from rhodecode.model.db import ( |
|
47 | 47 | PullRequestReviewers, User, UserEmailMap, UserIpMap, RepoGroup) |
|
48 | 48 | from rhodecode.model.forms import ( |
|
49 | 49 | UserForm, UserPermissionsForm, UserIndividualPermissionsForm) |
|
50 | 50 | from rhodecode.model.repo_group import RepoGroupModel |
|
51 | 51 | from rhodecode.model.user import UserModel |
|
52 | 52 | from rhodecode.model.meta import Session |
|
53 | 53 | from rhodecode.model.permission import PermissionModel |
|
54 | 54 | |
|
55 | 55 | log = logging.getLogger(__name__) |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | class UsersController(BaseController): |
|
59 | 59 | """REST Controller styled on the Atom Publishing Protocol""" |
|
60 | 60 | |
|
61 | 61 | @LoginRequired() |
|
62 | 62 | def __before__(self): |
|
63 | 63 | super(UsersController, self).__before__() |
|
64 | 64 | c.available_permissions = config['available_permissions'] |
|
65 | 65 | c.allowed_languages = [ |
|
66 | 66 | ('en', 'English (en)'), |
|
67 | 67 | ('de', 'German (de)'), |
|
68 | 68 | ('fr', 'French (fr)'), |
|
69 | 69 | ('it', 'Italian (it)'), |
|
70 | 70 | ('ja', 'Japanese (ja)'), |
|
71 | 71 | ('pl', 'Polish (pl)'), |
|
72 | 72 | ('pt', 'Portuguese (pt)'), |
|
73 | 73 | ('ru', 'Russian (ru)'), |
|
74 | 74 | ('zh', 'Chinese (zh)'), |
|
75 | 75 | ] |
|
76 | 76 | PermissionModel().set_global_permission_choices(c, gettext_translator=_) |
|
77 | 77 | |
|
78 | 78 | def _get_personal_repo_group_template_vars(self): |
|
79 | 79 | DummyUser = AttributeDict({ |
|
80 | 80 | 'username': '${username}', |
|
81 | 81 | 'user_id': '${user_id}', |
|
82 | 82 | }) |
|
83 | 83 | c.default_create_repo_group = RepoGroupModel() \ |
|
84 | 84 | .get_default_create_personal_repo_group() |
|
85 | 85 | c.personal_repo_group_name = RepoGroupModel() \ |
|
86 | 86 | .get_personal_group_name(DummyUser) |
|
87 | 87 | |
|
88 | 88 | @HasPermissionAllDecorator('hg.admin') |
|
89 | 89 | @auth.CSRFRequired() |
|
90 | 90 | def create(self): |
|
91 | 91 | c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.name |
|
92 | 92 | user_model = UserModel() |
|
93 | 93 | user_form = UserForm()() |
|
94 | 94 | try: |
|
95 | 95 | form_result = user_form.to_python(dict(request.POST)) |
|
96 | 96 | user = user_model.create(form_result) |
|
97 | 97 | Session().flush() |
|
98 | 98 | creation_data = user.get_api_data() |
|
99 | 99 | username = form_result['username'] |
|
100 | 100 | |
|
101 | 101 | audit_logger.store_web( |
|
102 | 102 | 'user.create', action_data={'data': creation_data}, |
|
103 | 103 | user=c.rhodecode_user) |
|
104 | 104 | |
|
105 | 105 | user_link = h.link_to(h.escape(username), |
|
106 | 106 | url('edit_user', |
|
107 | 107 | user_id=user.user_id)) |
|
108 | 108 | h.flash(h.literal(_('Created user %(user_link)s') |
|
109 | 109 | % {'user_link': user_link}), category='success') |
|
110 | 110 | Session().commit() |
|
111 | 111 | except formencode.Invalid as errors: |
|
112 | 112 | self._get_personal_repo_group_template_vars() |
|
113 | 113 | return htmlfill.render( |
|
114 | 114 | render('admin/users/user_add.mako'), |
|
115 | 115 | defaults=errors.value, |
|
116 | 116 | errors=errors.error_dict or {}, |
|
117 | 117 | prefix_error=False, |
|
118 | 118 | encoding="UTF-8", |
|
119 | 119 | force_defaults=False) |
|
120 | 120 | except UserCreationError as e: |
|
121 | 121 | h.flash(e, 'error') |
|
122 | 122 | except Exception: |
|
123 | 123 | log.exception("Exception during creation of user")
|
124 | 124 | h.flash(_('Error occurred during creation of user %s') |
|
125 | 125 | % request.POST.get('username'), category='error') |
|
126 | 126 | return redirect(h.route_path('users')) |
|
127 | 127 | |
|
128 | 128 | @HasPermissionAllDecorator('hg.admin') |
|
129 | 129 | def new(self): |
|
130 | 130 | c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.name |
|
131 | 131 | self._get_personal_repo_group_template_vars() |
|
132 | 132 | return render('admin/users/user_add.mako') |
|
133 | 133 | |
|
134 | 134 | @HasPermissionAllDecorator('hg.admin') |
|
135 | 135 | @auth.CSRFRequired() |
|
136 | 136 | def update(self, user_id): |
|
137 | 137 | |
|
138 | 138 | user_id = safe_int(user_id) |
|
139 | 139 | c.user = User.get_or_404(user_id) |
|
140 | 140 | c.active = 'profile' |
|
141 | 141 | c.extern_type = c.user.extern_type |
|
142 | 142 | c.extern_name = c.user.extern_name |
|
143 | 143 | c.perm_user = AuthUser(user_id=user_id, ip_addr=self.ip_addr) |
|
144 | 144 | available_languages = [x[0] for x in c.allowed_languages] |
|
145 | 145 | _form = UserForm(edit=True, available_languages=available_languages, |
|
146 | 146 | old_data={'user_id': user_id, |
|
147 | 147 | 'email': c.user.email})() |
|
148 | 148 | form_result = {} |
|
149 | 149 | old_values = c.user.get_api_data() |
|
150 | 150 | try: |
|
151 | 151 | form_result = _form.to_python(dict(request.POST)) |
|
152 | 152 | skip_attrs = ['extern_type', 'extern_name'] |
|
153 | 153 | # TODO: plugin should define if username can be updated |
|
154 | 154 | if c.extern_type != "rhodecode": |
|
155 | 155 | # forbid updating username for external accounts |
|
156 | 156 | skip_attrs.append('username') |
|
157 | 157 | |
|
158 | 158 | UserModel().update_user( |
|
159 | 159 | user_id, skip_attrs=skip_attrs, **form_result) |
|
160 | 160 | |
|
161 | 161 | audit_logger.store_web( |
|
162 | 162 | 'user.edit', action_data={'old_data': old_values}, |
|
163 | 163 | user=c.rhodecode_user) |
|
164 | 164 | |
|
165 | 165 | Session().commit() |
|
166 | 166 | h.flash(_('User updated successfully'), category='success') |
|
167 | 167 | except formencode.Invalid as errors: |
|
168 | 168 | defaults = errors.value |
|
169 | 169 | e = errors.error_dict or {} |
|
170 | 170 | |
|
171 | 171 | return htmlfill.render( |
|
172 | 172 | render('admin/users/user_edit.mako'), |
|
173 | 173 | defaults=defaults, |
|
174 | 174 | errors=e, |
|
175 | 175 | prefix_error=False, |
|
176 | 176 | encoding="UTF-8", |
|
177 | 177 | force_defaults=False) |
|
178 | 178 | except UserCreationError as e: |
|
179 | 179 | h.flash(e, 'error') |
|
180 | 180 | except Exception: |
|
181 | 181 | log.exception("Exception updating user") |
|
182 | 182 | h.flash(_('Error occurred during update of user %s') |
|
183 | 183 | % form_result.get('username'), category='error') |
|
184 | 184 | return redirect(url('edit_user', user_id=user_id)) |
|
185 | 185 | |
|
186 | 186 | @HasPermissionAllDecorator('hg.admin') |
|
187 | 187 | @auth.CSRFRequired() |
|
188 | 188 | def delete(self, user_id): |
|
189 | 189 | user_id = safe_int(user_id) |
|
190 | 190 | c.user = User.get_or_404(user_id) |
|
191 | 191 | |
|
192 | 192 | _repos = c.user.repositories |
|
193 | 193 | _repo_groups = c.user.repository_groups |
|
194 | 194 | _user_groups = c.user.user_groups |
|
195 | 195 | |
|
196 | 196 | handle_repos = None |
|
197 | 197 | handle_repo_groups = None |
|
198 | 198 | handle_user_groups = None |
|
199 | 199 | # dummy no-op callables for the handle flash messages
|
200 | 200 | set_handle_flash_repos = lambda: None |
|
201 | 201 | set_handle_flash_repo_groups = lambda: None |
|
202 | 202 | set_handle_flash_user_groups = lambda: None |
|
203 | 203 | |
|
204 | 204 | if _repos and request.POST.get('user_repos'): |
|
205 | 205 | do = request.POST['user_repos'] |
|
206 | 206 | if do == 'detach': |
|
207 | 207 | handle_repos = 'detach' |
|
208 | 208 | set_handle_flash_repos = lambda: h.flash( |
|
209 | 209 | _('Detached %s repositories') % len(_repos), |
|
210 | 210 | category='success') |
|
211 | 211 | elif do == 'delete': |
|
212 | 212 | handle_repos = 'delete' |
|
213 | 213 | set_handle_flash_repos = lambda: h.flash( |
|
214 | 214 | _('Deleted %s repositories') % len(_repos), |
|
215 | 215 | category='success') |
|
216 | 216 | |
|
217 | 217 | if _repo_groups and request.POST.get('user_repo_groups'): |
|
218 | 218 | do = request.POST['user_repo_groups'] |
|
219 | 219 | if do == 'detach': |
|
220 | 220 | handle_repo_groups = 'detach' |
|
221 | 221 | set_handle_flash_repo_groups = lambda: h.flash( |
|
222 | 222 | _('Detached %s repository groups') % len(_repo_groups), |
|
223 | 223 | category='success') |
|
224 | 224 | elif do == 'delete': |
|
225 | 225 | handle_repo_groups = 'delete' |
|
226 | 226 | set_handle_flash_repo_groups = lambda: h.flash( |
|
227 | 227 | _('Deleted %s repository groups') % len(_repo_groups), |
|
228 | 228 | category='success') |
|
229 | 229 | |
|
230 | 230 | if _user_groups and request.POST.get('user_user_groups'): |
|
231 | 231 | do = request.POST['user_user_groups'] |
|
232 | 232 | if do == 'detach': |
|
233 | 233 | handle_user_groups = 'detach' |
|
234 | 234 | set_handle_flash_user_groups = lambda: h.flash( |
|
235 | 235 | _('Detached %s user groups') % len(_user_groups), |
|
236 | 236 | category='success') |
|
237 | 237 | elif do == 'delete': |
|
238 | 238 | handle_user_groups = 'delete' |
|
239 | 239 | set_handle_flash_user_groups = lambda: h.flash( |
|
240 | 240 | _('Deleted %s user groups') % len(_user_groups), |
|
241 | 241 | category='success') |
|
242 | 242 | |
|
243 | 243 | old_values = c.user.get_api_data() |
|
244 | 244 | try: |
|
245 | 245 | UserModel().delete(c.user, handle_repos=handle_repos, |
|
246 | 246 | handle_repo_groups=handle_repo_groups, |
|
247 | 247 | handle_user_groups=handle_user_groups) |
|
248 | 248 | |
|
249 | 249 | audit_logger.store_web( |
|
250 | 250 | 'user.delete', action_data={'old_data': old_values}, |
|
251 | 251 | user=c.rhodecode_user) |
|
252 | 252 | |
|
253 | 253 | Session().commit() |
|
254 | 254 | set_handle_flash_repos() |
|
255 | 255 | set_handle_flash_repo_groups() |
|
256 | 256 | set_handle_flash_user_groups() |
|
257 | 257 | h.flash(_('Successfully deleted user'), category='success') |
|
258 | 258 | except (UserOwnsReposException, UserOwnsRepoGroupsException, |
|
259 | 259 | UserOwnsUserGroupsException, DefaultUserException) as e: |
|
260 | 260 | h.flash(e, category='warning') |
|
261 | 261 | except Exception: |
|
262 | 262 | log.exception("Exception during deletion of user") |
|
263 | 263 | h.flash(_('An error occurred during deletion of user'), |
|
264 | 264 | category='error') |
|
265 | 265 | return redirect(h.route_path('users')) |
|
266 | 266 | |
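The three form fields driving this branch make up a small protocol: each of ``user_repos``, ``user_repo_groups`` and ``user_user_groups`` may carry ``detach`` or ``delete``. A hypothetical POST body exercising it:

.. code-block:: python

    # Hypothetical form payload for the delete action above: detach the
    # user's repositories, remove their groups outright.
    post_data = {
        'user_repos': 'detach',
        'user_repo_groups': 'delete',
        'user_user_groups': 'detach',
        'csrf_token': '...',  # required by @auth.CSRFRequired()
    }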
|
267 | 267 | @HasPermissionAllDecorator('hg.admin') |
|
268 | 268 | @auth.CSRFRequired() |
|
269 | 269 | def reset_password(self, user_id): |
|
270 | 270 | """ |
|
271 | 271 | toggle reset password flag for this user |
|
272 | 272 | """ |
|
273 | 273 | user_id = safe_int(user_id) |
|
274 | 274 | c.user = User.get_or_404(user_id) |
|
275 | 275 | try: |
|
276 | 276 | old_value = c.user.user_data.get('force_password_change') |
|
277 | 277 | c.user.update_userdata(force_password_change=not old_value) |
|
278 | 278 | |
|
279 | 279 | if old_value: |
|
280 | 280 | msg = _('Force password change disabled for user') |
|
281 | 281 | audit_logger.store_web( |
|
282 | 282 | 'user.edit.password_reset.disabled', |
|
283 | 283 | user=c.rhodecode_user) |
|
284 | 284 | else: |
|
285 | 285 | msg = _('Force password change enabled for user') |
|
286 | 286 | audit_logger.store_web( |
|
287 | 287 | 'user.edit.password_reset.enabled', |
|
288 | 288 | user=c.rhodecode_user) |
|
289 | 289 | |
|
290 | 290 | Session().commit() |
|
291 | 291 | h.flash(msg, category='success') |
|
292 | 292 | except Exception: |
|
293 | 293 | log.exception("Exception during password reset for user") |
|
294 | 294 | h.flash(_('An error occurred during password reset for user'), |
|
295 | 295 | category='error') |
|
296 | 296 | |
|
297 | 297 | return redirect(url('edit_user_advanced', user_id=user_id)) |
|
298 | 298 | |
|
299 | 299 | @HasPermissionAllDecorator('hg.admin') |
|
300 | 300 | @auth.CSRFRequired() |
|
301 | 301 | def create_personal_repo_group(self, user_id): |
|
302 | 302 | """ |
|
303 | 303 | Create personal repository group for this user |
|
304 | 304 | """ |
|
305 | 305 | from rhodecode.model.repo_group import RepoGroupModel |
|
306 | 306 | |
|
307 | 307 | user_id = safe_int(user_id) |
|
308 | 308 | c.user = User.get_or_404(user_id) |
|
309 | 309 | personal_repo_group = RepoGroup.get_user_personal_repo_group( |
|
310 | 310 | c.user.user_id) |
|
311 | 311 | if personal_repo_group: |
|
312 | 312 | return redirect(url('edit_user_advanced', user_id=user_id)) |
|
313 | 313 | |
|
314 | 314 | personal_repo_group_name = RepoGroupModel().get_personal_group_name( |
|
315 | 315 | c.user) |
|
316 | 316 | named_personal_group = RepoGroup.get_by_group_name( |
|
317 | 317 | personal_repo_group_name) |
|
318 | 318 | try: |
|
319 | 319 | |
|
320 | 320 | if named_personal_group and named_personal_group.user_id == c.user.user_id: |
|
321 | 321 | # migrate the same named group, and mark it as personal |
|
322 | 322 | named_personal_group.personal = True |
|
323 | 323 | Session().add(named_personal_group) |
|
324 | 324 | Session().commit() |
|
325 | 325 | msg = _('Linked repository group `%s` as personal') % (

326 | 326 | personal_repo_group_name,)
|
327 | 327 | h.flash(msg, category='success') |
|
328 | 328 | elif not named_personal_group: |
|
329 | 329 | RepoGroupModel().create_personal_repo_group(c.user) |
|
330 | 330 | |
|
331 | 331 | msg = _('Created repository group `%s`') % (

332 | 332 | personal_repo_group_name,)
|
333 | 333 | h.flash(msg, category='success') |
|
334 | 334 | else: |
|
335 | 335 | msg = _('Repository group `%s` is already taken') % (

336 | 336 | personal_repo_group_name,)
|
337 | 337 | h.flash(msg, category='warning') |
|
338 | 338 | except Exception: |
|
339 | 339 | log.exception("Exception during repository group creation") |
|
340 | 340 | msg = _( |
|
341 | 341 | 'An error occurred during repository group creation for user') |
|
342 | 342 | h.flash(msg, category='error') |
|
343 | 343 | Session().rollback() |
|
344 | 344 | |
|
345 | 345 | return redirect(url('edit_user_advanced', user_id=user_id)) |
|
346 | 346 | |
|
347 | 347 | @HasPermissionAllDecorator('hg.admin') |
|
348 | 348 | def show(self, user_id): |
|
349 | 349 | """GET /users/user_id: Show a specific item""" |
|
350 | 350 | # url('user', user_id=ID) |
|
351 | 351 | User.get_or_404(-1) |
|
352 | 352 | |
|
353 | 353 | @HasPermissionAllDecorator('hg.admin') |
|
354 | 354 | def edit(self, user_id): |
|
355 | 355 | """GET /users/user_id/edit: Form to edit an existing item""" |
|
356 | 356 | # url('edit_user', user_id=ID) |
|
357 | 357 | user_id = safe_int(user_id) |
|
358 | 358 | c.user = User.get_or_404(user_id) |
|
359 | 359 | if c.user.username == User.DEFAULT_USER: |
|
360 | 360 | h.flash(_("You can't edit this user"), category='warning') |
|
361 | 361 | return redirect(h.route_path('users')) |
|
362 | 362 | |
|
363 | 363 | c.active = 'profile' |
|
364 | 364 | c.extern_type = c.user.extern_type |
|
365 | 365 | c.extern_name = c.user.extern_name |
|
366 | 366 | c.perm_user = AuthUser(user_id=user_id, ip_addr=self.ip_addr) |
|
367 | 367 | |
|
368 | 368 | defaults = c.user.get_dict() |
|
369 | 369 | defaults.update({'language': c.user.user_data.get('language')}) |
|
370 | 370 | return htmlfill.render( |
|
371 | 371 | render('admin/users/user_edit.mako'), |
|
372 | 372 | defaults=defaults, |
|
373 | 373 | encoding="UTF-8", |
|
374 | 374 | force_defaults=False) |
|
375 | 375 | |
|
376 | 376 | @HasPermissionAllDecorator('hg.admin') |
|
377 | 377 | def edit_advanced(self, user_id): |
|
378 | 378 | user_id = safe_int(user_id) |
|
379 | 379 | user = c.user = User.get_or_404(user_id) |
|
380 | 380 | if user.username == User.DEFAULT_USER: |
|
381 | 381 | h.flash(_("You can't edit this user"), category='warning') |
|
382 | 382 | return redirect(h.route_path('users')) |
|
383 | 383 | |
|
384 | 384 | c.active = 'advanced' |
|
385 | 385 | c.personal_repo_group = RepoGroup.get_user_personal_repo_group(user_id) |
|
386 | 386 | c.personal_repo_group_name = RepoGroupModel()\ |
|
387 | 387 | .get_personal_group_name(user) |
|
388 | 388 | c.first_admin = User.get_first_super_admin() |
|
389 | 389 | defaults = user.get_dict() |
|
390 | 390 | |
|
391 | 391 | # Interim workaround if the user participated in any pull requests as a
|
392 | 392 | # reviewer. |
|
393 | 393 | has_review = bool(PullRequestReviewers.query().filter( |
|
394 | 394 | PullRequestReviewers.user_id == user_id).first()) |
|
395 | 395 | c.can_delete_user = not has_review |
|
396 | 396 | c.can_delete_user_message = _( |
|
397 | 397 | 'The user participates as a reviewer in pull requests and '
|
398 | 398 | 'cannot be deleted. You can set the user to ' |
|
399 | 399 | '"inactive" instead of deleting it.') if has_review else '' |
|
400 | 400 | |
|
401 | 401 | return htmlfill.render( |
|
402 | 402 | render('admin/users/user_edit.mako'), |
|
403 | 403 | defaults=defaults, |
|
404 | 404 | encoding="UTF-8", |
|
405 | 405 | force_defaults=False) |
|
406 | 406 | |
|
407 | 407 | @HasPermissionAllDecorator('hg.admin') |
|
408 | 408 | def edit_global_perms(self, user_id): |
|
409 | 409 | user_id = safe_int(user_id) |
|
410 | 410 | c.user = User.get_or_404(user_id) |
|
411 | 411 | if c.user.username == User.DEFAULT_USER: |
|
412 | 412 | h.flash(_("You can't edit this user"), category='warning') |
|
413 | 413 | return redirect(h.route_path('users')) |
|
414 | 414 | |
|
415 | 415 | c.active = 'global_perms' |
|
416 | 416 | |
|
417 | 417 | c.default_user = User.get_default_user() |
|
418 | 418 | defaults = c.user.get_dict() |
|
419 | 419 | defaults.update(c.default_user.get_default_perms(suffix='_inherited')) |
|
420 | 420 | defaults.update(c.default_user.get_default_perms()) |
|
421 | 421 | defaults.update(c.user.get_default_perms()) |
|
422 | 422 | |
|
423 | 423 | return htmlfill.render( |
|
424 | 424 | render('admin/users/user_edit.mako'), |
|
425 | 425 | defaults=defaults, |
|
426 | 426 | encoding="UTF-8", |
|
427 | 427 | force_defaults=False) |
|
428 | 428 | |
|
429 | 429 | @HasPermissionAllDecorator('hg.admin') |
|
430 | 430 | @auth.CSRFRequired() |
|
431 | 431 | def update_global_perms(self, user_id): |
|
432 | 432 | user_id = safe_int(user_id) |
|
433 | 433 | user = User.get_or_404(user_id) |
|
434 | 434 | c.active = 'global_perms' |
|
435 | 435 | try: |
|
436 | 436 | # first stage that verifies the checkbox |
|
437 | 437 | _form = UserIndividualPermissionsForm() |
|
438 | 438 | form_result = _form.to_python(dict(request.POST)) |
|
439 | 439 | inherit_perms = form_result['inherit_default_permissions'] |
|
440 | 440 | user.inherit_default_permissions = inherit_perms |
|
441 | 441 | Session().add(user) |
|
442 | 442 | |
|
443 | 443 | if not inherit_perms: |
|
444 | 444 | # only update the individual ones if we uncheck the flag
|
445 | 445 | _form = UserPermissionsForm( |
|
446 | 446 | [x[0] for x in c.repo_create_choices], |
|
447 | 447 | [x[0] for x in c.repo_create_on_write_choices], |
|
448 | 448 | [x[0] for x in c.repo_group_create_choices], |
|
449 | 449 | [x[0] for x in c.user_group_create_choices], |
|
450 | 450 | [x[0] for x in c.fork_choices], |
|
451 | 451 | [x[0] for x in c.inherit_default_permission_choices])() |
|
452 | 452 | |
|
453 | 453 | form_result = _form.to_python(dict(request.POST)) |
|
454 | 454 | form_result.update({'perm_user_id': user.user_id}) |
|
455 | 455 | |
|
456 | 456 | PermissionModel().update_user_permissions(form_result) |
|
457 | 457 | |
|
458 | 458 | # TODO(marcink): implement global permissions |
|
459 | 459 | # audit_log.store_web('user.edit.permissions') |
|
460 | 460 | |
|
461 | 461 | Session().commit() |
|
462 | 462 | h.flash(_('User global permissions updated successfully'), |
|
463 | 463 | category='success') |
|
464 | 464 | |
|
465 | 465 | except formencode.Invalid as errors: |
|
466 | 466 | defaults = errors.value |
|
467 | 467 | c.user = user |
|
468 | 468 | return htmlfill.render( |
|
469 | 469 | render('admin/users/user_edit.mako'), |
|
470 | 470 | defaults=defaults, |
|
471 | 471 | errors=errors.error_dict or {}, |
|
472 | 472 | prefix_error=False, |
|
473 | 473 | encoding="UTF-8", |
|
474 | 474 | force_defaults=False) |
|
475 | 475 | except Exception: |
|
476 | 476 | log.exception("Exception during permissions saving") |
|
477 | 477 | h.flash(_('An error occurred during permissions saving'), |
|
478 | 478 | category='error') |
|
479 | 479 | return redirect(url('edit_user_global_perms', user_id=user_id)) |
|
480 | 480 | |
|
481 | 481 | @HasPermissionAllDecorator('hg.admin') |
|
482 | 482 | def edit_perms_summary(self, user_id): |
|
483 | 483 | user_id = safe_int(user_id) |
|
484 | 484 | c.user = User.get_or_404(user_id) |
|
485 | 485 | if c.user.username == User.DEFAULT_USER: |
|
486 | 486 | h.flash(_("You can't edit this user"), category='warning') |
|
487 | 487 | return redirect(h.route_path('users')) |
|
488 | 488 | |
|
489 | 489 | c.active = 'perms_summary' |
|
490 | 490 | c.perm_user = AuthUser(user_id=user_id, ip_addr=self.ip_addr) |
|
491 | 491 | |
|
492 | 492 | return render('admin/users/user_edit.mako') |
|
493 | 493 | |
|
494 | 494 | @HasPermissionAllDecorator('hg.admin') |
|
495 | 495 | def edit_emails(self, user_id): |
|
496 | 496 | user_id = safe_int(user_id) |
|
497 | 497 | c.user = User.get_or_404(user_id) |
|
498 | 498 | if c.user.username == User.DEFAULT_USER: |
|
499 | 499 | h.flash(_("You can't edit this user"), category='warning') |
|
500 | 500 | return redirect(h.route_path('users')) |
|
501 | 501 | |
|
502 | 502 | c.active = 'emails' |
|
503 | 503 | c.user_email_map = UserEmailMap.query() \ |
|
504 | 504 | .filter(UserEmailMap.user == c.user).all() |
|
505 | 505 | |
|
506 | 506 | defaults = c.user.get_dict() |
|
507 | 507 | return htmlfill.render( |
|
508 | 508 | render('admin/users/user_edit.mako'), |
|
509 | 509 | defaults=defaults, |
|
510 | 510 | encoding="UTF-8", |
|
511 | 511 | force_defaults=False) |
|
512 | 512 | |
|
513 | 513 | @HasPermissionAllDecorator('hg.admin') |
|
514 | 514 | @auth.CSRFRequired() |
|
515 | 515 | def add_email(self, user_id): |
|
516 | 516 | user_id = safe_int(user_id) |
|
517 | 517 | c.user = User.get_or_404(user_id) |
|
518 | 518 | |
|
519 | 519 | email = request.POST.get('new_email') |
|
520 | 520 | user_model = UserModel() |
|
521 | 521 | user_data = c.user.get_api_data() |
|
522 | 522 | try: |
|
523 | 523 | user_model.add_extra_email(user_id, email) |
|
524 | 524 | audit_logger.store_web( |
|
525 | 525 | 'user.edit.email.add', |
|
526 | 526 | action_data={'email': email, 'user': user_data}, |
|
527 | 527 | user=c.rhodecode_user) |
|
528 | 528 | Session().commit() |
|
529 | 529 | h.flash(_("Added new email address `%s` for user account") % email, |
|
530 | 530 | category='success') |
|
531 | 531 | except formencode.Invalid as error: |
|
532 | 532 | msg = error.error_dict['email'] |
|
533 | 533 | h.flash(msg, category='error') |
|
534 | 534 | except Exception: |
|
535 | 535 | log.exception("Exception during email saving") |
|
536 | 536 | h.flash(_('An error occurred during email saving'), |
|
537 | 537 | category='error') |
|
538 | 538 | return redirect(url('edit_user_emails', user_id=user_id)) |
|
539 | 539 | |
|
540 | 540 | @HasPermissionAllDecorator('hg.admin') |
|
541 | 541 | @auth.CSRFRequired() |
|
542 | 542 | def delete_email(self, user_id): |
|
543 | 543 | user_id = safe_int(user_id) |
|
544 | 544 | c.user = User.get_or_404(user_id) |
|
545 | 545 | email_id = request.POST.get('del_email_id') |
|
546 | 546 | user_model = UserModel() |
|
547 | 547 | |
|
548 | 548 | email = UserEmailMap.query().get(email_id).email |
|
549 | 549 | user_data = c.user.get_api_data() |
|
550 | 550 | user_model.delete_extra_email(user_id, email_id) |
|
551 | 551 | audit_logger.store_web( |
|
552 | 552 | 'user.edit.email.delete', |
|
553 | 553 | action_data={'email': email, 'user': user_data}, |
|
554 | 554 | user=c.rhodecode_user) |
|
555 | 555 | Session().commit() |
|
556 | 556 | h.flash(_("Removed email address from user account"), category='success') |
|
557 | 557 | return redirect(url('edit_user_emails', user_id=user_id)) |
|
558 | 558 | |
|
559 | 559 | @HasPermissionAllDecorator('hg.admin') |
|
560 | 560 | def edit_ips(self, user_id): |
|
561 | 561 | user_id = safe_int(user_id) |
|
562 | 562 | c.user = User.get_or_404(user_id) |
|
563 | 563 | if c.user.username == User.DEFAULT_USER: |
|
564 | 564 | h.flash(_("You can't edit this user"), category='warning') |
|
565 | 565 | return redirect(h.route_path('users')) |
|
566 | 566 | |
|
567 | 567 | c.active = 'ips' |
|
568 | 568 | c.user_ip_map = UserIpMap.query() \ |
|
569 | 569 | .filter(UserIpMap.user == c.user).all() |
|
570 | 570 | |
|
571 | 571 | c.inherit_default_ips = c.user.inherit_default_permissions |
|
572 | 572 | c.default_user_ip_map = UserIpMap.query() \ |
|
573 | 573 | .filter(UserIpMap.user == User.get_default_user()).all() |
|
574 | 574 | |
|
575 | 575 | defaults = c.user.get_dict() |
|
576 | 576 | return htmlfill.render( |
|
577 | 577 | render('admin/users/user_edit.mako'), |
|
578 | 578 | defaults=defaults, |
|
579 | 579 | encoding="UTF-8", |
|
580 | 580 | force_defaults=False) |
|
581 | 581 | |
|
582 | 582 | @HasPermissionAllDecorator('hg.admin') |
|
583 | 583 | @auth.CSRFRequired() |
|
584 | 584 | def add_ip(self, user_id): |
|
585 | 585 | user_id = safe_int(user_id) |
|
586 | 586 | c.user = User.get_or_404(user_id) |
|
587 | 587 | user_model = UserModel() |
|
588 | 588 | try: |
|
589 | 589 | ip_list = user_model.parse_ip_range(request.POST.get('new_ip')) |
|
590 | 590 | except Exception as e: |
|
591 | 591 | ip_list = [] |
|
592 | 592 | log.exception("Exception during ip saving") |
|
593 | 593 | h.flash(_('An error occurred during ip saving: %s') % (e,),
|
594 | 594 | category='error') |
|
595 | 595 | |
|
596 | 596 | desc = request.POST.get('description') |
|
597 | 597 | added = [] |
|
598 | 598 | user_data = c.user.get_api_data() |
|
599 | 599 | for ip in ip_list: |
|
600 | 600 | try: |
|
601 | 601 | user_model.add_extra_ip(user_id, ip, desc) |
|
602 | 602 | audit_logger.store_web( |
|
603 | 603 | 'user.edit.ip.add', |
|
604 | 604 | action_data={'ip': ip, 'user': user_data}, |
|
605 | 605 | user=c.rhodecode_user) |
|
606 | 606 | Session().commit() |
|
607 | 607 | added.append(ip) |
|
608 | 608 | except formencode.Invalid as error: |
|
609 | 609 | msg = error.error_dict['ip'] |
|
610 | 610 | h.flash(msg, category='error') |
|
611 | 611 | except Exception: |
|
612 | 612 | log.exception("Exception during ip saving") |
|
613 | 613 | h.flash(_('An error occurred during ip saving'), |
|
614 | 614 | category='error') |
|
615 | 615 | if added: |
|
616 | 616 | h.flash( |
|
617 | 617 | _("Added ips %s to user whitelist") % (', '.join(added), ),
|
618 | 618 | category='success') |
|
619 | 619 | if 'default_user' in request.POST: |
|
620 | 620 | return redirect(url('admin_permissions_ips')) |
|
621 | 621 | return redirect(url('edit_user_ips', user_id=user_id)) |
|
622 | 622 | |
|
623 | 623 | @HasPermissionAllDecorator('hg.admin') |
|
624 | 624 | @auth.CSRFRequired() |
|
625 | 625 | def delete_ip(self, user_id): |
|
626 | 626 | user_id = safe_int(user_id) |
|
627 | 627 | c.user = User.get_or_404(user_id) |
|
628 | 628 | |
|
629 | 629 | ip_id = request.POST.get('del_ip_id') |
|
630 | 630 | user_model = UserModel() |
|
631 | user_data = c.user.get_api_data() | |
|
631 | 632 | ip = UserIpMap.query().get(ip_id).ip_addr |
|
632 | user_data = c.user.get_api_data() | |
|
633 | 633 | user_model.delete_extra_ip(user_id, ip_id) |
|
634 | 634 | audit_logger.store_web( |
|
635 | 635 | 'user.edit.ip.delete', |
|
636 | 636 | action_data={'ip': ip, 'user': user_data}, |
|
637 | 637 | user=c.rhodecode_user) |
|
638 | 638 | Session().commit() |
|
639 | 639 | h.flash(_("Removed ip address from user whitelist"), category='success') |
|
640 | 640 | |
|
641 | 641 | if 'default_user' in request.POST: |
|
642 | 642 | return redirect(url('admin_permissions_ips')) |
|
643 | 643 | return redirect(url('edit_user_ips', user_id=user_id)) |
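Every mutating action in this controller repeats one sequence: change state through the model layer, record the change with ``audit_logger.store_web``, then commit. A condensed, hypothetical restatement of that shape:

.. code-block:: python

    # Hypothetical condensation of the pattern used above; `action` and
    # `data` stand in for the concrete values of each controller method.
    def audited_change(action, data, apply_change):
        apply_change()                        # mutate via the model layer
        audit_logger.store_web(               # audit entry for the change
            action, action_data={'data': data}, user=c.rhodecode_user)
        Session().commit()                    # persist change + audit together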
@@ -1,484 +1,484 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | commit controller for RhodeCode showing changes between commits |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | |
|
27 | 27 | from collections import defaultdict |
|
28 | 28 | from webob.exc import HTTPForbidden, HTTPBadRequest, HTTPNotFound |
|
29 | 29 | |
|
30 | 30 | from pylons import tmpl_context as c, request, response |
|
31 | 31 | from pylons.i18n.translation import _ |
|
32 | 32 | from pylons.controllers.util import redirect |
|
33 | 33 | |
|
34 | 34 | from rhodecode.lib import auth |
|
35 | 35 | from rhodecode.lib import diffs, codeblocks |
|
36 | 36 | from rhodecode.lib.auth import ( |
|
37 | 37 | LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous) |
|
38 | 38 | from rhodecode.lib.base import BaseRepoController, render |
|
39 | 39 | from rhodecode.lib.compat import OrderedDict |
|
40 | 40 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError |
|
41 | 41 | import rhodecode.lib.helpers as h |
|
42 | 42 | from rhodecode.lib.utils import jsonify |
|
43 | 43 | from rhodecode.lib.utils2 import safe_unicode |
|
44 | 44 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
45 | 45 | from rhodecode.lib.vcs.exceptions import ( |
|
46 | 46 | RepositoryError, CommitDoesNotExistError, NodeDoesNotExistError) |
|
47 | 47 | from rhodecode.model.db import ChangesetComment, ChangesetStatus |
|
48 | 48 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
49 | 49 | from rhodecode.model.comment import CommentsModel |
|
50 | 50 | from rhodecode.model.meta import Session |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | log = logging.getLogger(__name__) |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | def _update_with_GET(params, GET): |
|
57 | 57 | for k in ['diff1', 'diff2', 'diff']: |
|
58 | 58 | params[k] += GET.getall(k) |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | def get_ignore_ws(fid, GET): |
|
62 | 62 | ig_ws_global = GET.get('ignorews') |
|
63 | 63 | ig_ws = filter(lambda k: k.startswith('WS'), GET.getall(fid)) |
|
64 | 64 | if ig_ws: |
|
65 | 65 | try: |
|
66 | 66 | return int(ig_ws[0].split(':')[-1]) |
|
67 | 67 | except Exception: |
|
68 | 68 | pass |
|
69 | 69 | return ig_ws_global |
|
70 | 70 | |
|
71 | 71 | |
|
72 | 72 | def _ignorews_url(GET, fileid=None): |
|
73 | 73 | fileid = str(fileid) if fileid else None |
|
74 | 74 | params = defaultdict(list) |
|
75 | 75 | _update_with_GET(params, GET) |
|
76 | 76 | label = _('Show whitespace') |
|
77 | 77 | tooltiplbl = _('Show whitespace for all diffs') |
|
78 | 78 | ig_ws = get_ignore_ws(fileid, GET) |
|
79 | 79 | ln_ctx = get_line_ctx(fileid, GET) |
|
80 | 80 | |
|
81 | 81 | if ig_ws is None: |
|
82 | 82 | params['ignorews'] += [1] |
|
83 | 83 | label = _('Ignore whitespace') |
|
84 | 84 | tooltiplbl = _('Ignore whitespace for all diffs') |
|
85 | 85 | ctx_key = 'context' |
|
86 | 86 | ctx_val = ln_ctx |
|
87 | 87 | |
|
88 | 88 | # if ln_ctx was passed in, pass it along in our params
|
89 | 89 | if ln_ctx: |
|
90 | 90 | params[ctx_key] += [ctx_val] |
|
91 | 91 | |
|
92 | 92 | if fileid: |
|
93 | 93 | params['anchor'] = 'a_' + fileid |
|
94 | 94 | return h.link_to(label, h.url.current(**params), title=tooltiplbl, class_='tooltip') |
|
95 | 95 | |
|
96 | 96 | |
|
97 | 97 | def get_line_ctx(fid, GET): |
|
98 | 98 | ln_ctx_global = GET.get('context') |
|
99 | 99 | if fid: |
|
100 | 100 | ln_ctx = filter(lambda k: k.startswith('C'), GET.getall(fid)) |
|
101 | 101 | else: |
|
102 | 102 | _ln_ctx = filter(lambda k: k.startswith('C'), GET) |
|
103 | 103 | ln_ctx = GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global |
|
104 | 104 | if ln_ctx: |
|
105 | 105 | ln_ctx = [ln_ctx] |
|
106 | 106 | |
|
107 | 107 | if ln_ctx: |
|
108 | 108 | retval = ln_ctx[0].split(':')[-1] |
|
109 | 109 | else: |
|
110 | 110 | retval = ln_ctx_global |
|
111 | 111 | |
|
112 | 112 | try: |
|
113 | 113 | return int(retval) |
|
114 | 114 | except Exception: |
|
115 | 115 | return 3 |
|
116 | 116 | |
|
117 | 117 | |
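Per-file overrides travel in the query string as values keyed by the file id (``C:<n>`` for context lines, ``WS:<n>`` for whitespace). A small, hypothetical exercise of ``get_line_ctx`` under Python 2, using webob's ``MultiDict`` as a stand-in for ``request.GET``:

.. code-block:: python

    from webob.multidict import MultiDict

    # Global default only: ?context=6
    assert get_line_ctx('a_fid', MultiDict([('context', '6')])) == 6
    # A per-file override wins: ?context=6&a_fid=C:10
    assert get_line_ctx('a_fid', MultiDict(
        [('context', '6'), ('a_fid', 'C:10')])) == 10
    # Nothing set anywhere falls back to 3 context lines.
    assert get_line_ctx('a_fid', MultiDict()) == 3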
|
118 | 118 | def _context_url(GET, fileid=None): |
|
119 | 119 | """ |
|
120 | 120 | Generates a url for context lines. |
|
121 | 121 | |
|
122 | 122 | :param fileid: optional file id; used for the per-file parameters and anchor
|
123 | 123 | """ |
|
124 | 124 | |
|
125 | 125 | fileid = str(fileid) if fileid else None |
|
126 | 126 | ig_ws = get_ignore_ws(fileid, GET) |
|
127 | 127 | ln_ctx = (get_line_ctx(fileid, GET) or 3) * 2 |
|
128 | 128 | |
|
129 | 129 | params = defaultdict(list) |
|
130 | 130 | _update_with_GET(params, GET) |
|
131 | 131 | |
|
132 | 132 | if ln_ctx > 0: |
|
133 | 133 | params['context'] += [ln_ctx] |
|
134 | 134 | |
|
135 | 135 | if ig_ws: |
|
136 | 136 | ig_ws_key = 'ignorews' |
|
137 | 137 | ig_ws_val = 1 |
|
138 | 138 | params[ig_ws_key] += [ig_ws_val] |
|
139 | 139 | |
|
140 | 140 | lbl = _('Increase context') |
|
141 | 141 | tooltiplbl = _('Increase context for all diffs') |
|
142 | 142 | |
|
143 | 143 | if fileid: |
|
144 | 144 | params['anchor'] = 'a_' + fileid |
|
145 | 145 | return h.link_to(lbl, h.url.current(**params), title=tooltiplbl, class_='tooltip') |
|
146 | 146 | |
|
147 | 147 | |
|
148 | 148 | class ChangesetController(BaseRepoController): |
|
149 | 149 | |
|
150 | 150 | def __before__(self): |
|
151 | 151 | super(ChangesetController, self).__before__() |
|
152 | 152 | c.affected_files_cut_off = 60 |
|
153 | 153 | |
|
154 | 154 | def _index(self, commit_id_range, method): |
|
155 | 155 | c.ignorews_url = _ignorews_url |
|
156 | 156 | c.context_url = _context_url |
|
157 | 157 | c.fulldiff = fulldiff = request.GET.get('fulldiff') |
|
158 | 158 | |
|
159 | 159 | # fetch global flags of ignore ws or context lines |
|
160 | 160 | context_lcl = get_line_ctx('', request.GET) |
|
161 | 161 | ign_whitespace_lcl = get_ignore_ws('', request.GET) |
|
162 | 162 | |
|
163 | 163 | # diff_limit will cut off the whole diff if the limit is applied |
|
164 | 164 | # otherwise it will just hide the big files from the front-end |
|
165 | 165 | diff_limit = self.cut_off_limit_diff |
|
166 | 166 | file_limit = self.cut_off_limit_file |
|
167 | 167 | |
|
168 | 168 | # get ranges of commit ids if present
|
169 | 169 | commit_range = commit_id_range.split('...')[:2] |
|
170 | 170 | |
|
171 | 171 | try: |
|
172 | 172 | pre_load = ['affected_files', 'author', 'branch', 'date', |
|
173 | 173 | 'message', 'parents'] |
|
174 | 174 | |
|
175 | 175 | if len(commit_range) == 2: |
|
176 | 176 | commits = c.rhodecode_repo.get_commits( |
|
177 | 177 | start_id=commit_range[0], end_id=commit_range[1], |
|
178 | 178 | pre_load=pre_load) |
|
179 | 179 | commits = list(commits) |
|
180 | 180 | else: |
|
181 | 181 | commits = [c.rhodecode_repo.get_commit( |
|
182 | 182 | commit_id=commit_id_range, pre_load=pre_load)] |
|
183 | 183 | |
|
184 | 184 | c.commit_ranges = commits |
|
185 | 185 | if not c.commit_ranges: |
|
186 | 186 | raise RepositoryError( |
|
187 | 187 | 'The commit range returned an empty result') |
|
188 | 188 | except CommitDoesNotExistError: |
|
189 | 189 | msg = _('No such commit exists for this repository') |
|
190 | 190 | h.flash(msg, category='error') |
|
191 | 191 | raise HTTPNotFound() |
|
192 | 192 | except Exception: |
|
193 | 193 | log.exception("General failure") |
|
194 | 194 | raise HTTPNotFound() |
|
195 | 195 | |
|
196 | 196 | c.changes = OrderedDict() |
|
197 | 197 | c.lines_added = 0 |
|
198 | 198 | c.lines_deleted = 0 |
|
199 | 199 | |
|
200 | 200 | # auto-collapse if we have more commits than the limit
|
201 | 201 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
202 | 202 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
203 | 203 | |
|
204 | 204 | c.commit_statuses = ChangesetStatus.STATUSES |
|
205 | 205 | c.inline_comments = [] |
|
206 | 206 | c.files = [] |
|
207 | 207 | |
|
208 | 208 | c.statuses = [] |
|
209 | 209 | c.comments = [] |
|
210 | 210 | c.unresolved_comments = [] |
|
211 | 211 | if len(c.commit_ranges) == 1: |
|
212 | 212 | commit = c.commit_ranges[0] |
|
213 | 213 | c.comments = CommentsModel().get_comments( |
|
214 | 214 | c.rhodecode_db_repo.repo_id, |
|
215 | 215 | revision=commit.raw_id) |
|
216 | 216 | c.statuses.append(ChangesetStatusModel().get_status( |
|
217 | 217 | c.rhodecode_db_repo.repo_id, commit.raw_id)) |
|
218 | 218 | # comments from PR |
|
219 | 219 | statuses = ChangesetStatusModel().get_statuses( |
|
220 | 220 | c.rhodecode_db_repo.repo_id, commit.raw_id, |
|
221 | 221 | with_revisions=True) |
|
222 | 222 | prs = set(st.pull_request for st in statuses |
|
223 | 223 | if st.pull_request is not None) |
|
224 | 224 | # from associated statuses, check the pull requests, and |
|
225 | 225 | # show comments from them |
|
226 | 226 | for pr in prs: |
|
227 | 227 | c.comments.extend(pr.comments) |
|
228 | 228 | |
|
229 | 229 | c.unresolved_comments = CommentsModel()\ |
|
230 | 230 | .get_commit_unresolved_todos(commit.raw_id) |
|
231 | 231 | |
|
232 | 232 | # Iterate over ranges (default commit view is always one commit) |
|
233 | 233 | for commit in c.commit_ranges: |
|
234 | 234 | c.changes[commit.raw_id] = [] |
|
235 | 235 | |
|
236 | 236 | commit2 = commit |
|
237 | 237 | commit1 = commit.parents[0] if commit.parents else EmptyCommit() |
|
238 | 238 | |
|
239 | 239 | _diff = c.rhodecode_repo.get_diff( |
|
240 | 240 | commit1, commit2, |
|
241 | 241 | ignore_whitespace=ign_whitespace_lcl, context=context_lcl) |
|
242 | 242 | diff_processor = diffs.DiffProcessor( |
|
243 | 243 | _diff, format='newdiff', diff_limit=diff_limit, |
|
244 | 244 | file_limit=file_limit, show_full_diff=fulldiff) |
|
245 | 245 | |
|
246 | 246 | commit_changes = OrderedDict() |
|
247 | 247 | if method == 'show': |
|
248 | 248 | _parsed = diff_processor.prepare() |
|
249 | 249 | c.limited_diff = isinstance(_parsed, diffs.LimitedDiffContainer) |
|
250 | 250 | |
|
251 | 251 | _parsed = diff_processor.prepare() |
|
252 | 252 | |
|
253 | 253 | def _node_getter(commit): |
|
254 | 254 | def get_node(fname): |
|
255 | 255 | try: |
|
256 | 256 | return commit.get_node(fname) |
|
257 | 257 | except NodeDoesNotExistError: |
|
258 | 258 | return None |
|
259 | 259 | return get_node |
|
260 | 260 | |
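
_node_getter is the closure pattern used to hand the diff renderer a lazy file lookup bound to one commit: missing paths come back as None instead of raising, which is how added and deleted files render against their absent counterpart. A self-contained sketch of the same idea, where FakeCommit and KeyError stand in for the real commit object and NodeDoesNotExistError:

    class FakeCommit(object):
        # stand-in for a vcs commit; the real one raises NodeDoesNotExistError
        def __init__(self, files):
            self.files = files

        def get_node(self, fname):
            if fname not in self.files:
                raise KeyError(fname)
            return self.files[fname]

    def node_getter(commit):
        def get_node(fname):
            try:
                return commit.get_node(fname)
            except KeyError:  # NodeDoesNotExistError in the real code
                return None
        return get_node

    get_node = node_getter(FakeCommit({'README.rst': 'docs'}))
    assert get_node('README.rst') == 'docs'
    assert get_node('missing.txt') is None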
|
261 | 261 | inline_comments = CommentsModel().get_inline_comments( |
|
262 | 262 | c.rhodecode_db_repo.repo_id, revision=commit.raw_id) |
|
263 | 263 | c.inline_cnt = CommentsModel().get_inline_comments_count( |
|
264 | 264 | inline_comments) |
|
265 | 265 | |
|
266 | 266 | diffset = codeblocks.DiffSet( |
|
267 | 267 | repo_name=c.repo_name, |
|
268 | 268 | source_node_getter=_node_getter(commit1), |
|
269 | 269 | target_node_getter=_node_getter(commit2), |
|
270 | 270 | comments=inline_comments |
|
271 | 271 | ).render_patchset(_parsed, commit1.raw_id, commit2.raw_id) |
|
272 | 272 | c.changes[commit.raw_id] = diffset |
|
273 | 273 | else: |
|
274 | 274 | # for downloads/raw we only need the RAW diff, nothing else 
|
275 | 275 | diff = diff_processor.as_raw() |
|
276 | 276 | c.changes[commit.raw_id] = [None, None, None, None, diff, None, None] |
|
277 | 277 | |
|
278 | 278 | # sort comments by how they were generated |
|
279 | 279 | c.comments = sorted(c.comments, key=lambda x: x.comment_id) |
|
280 | 280 | |
|
281 | 281 | if len(c.commit_ranges) == 1: |
|
282 | 282 | c.commit = c.commit_ranges[0] |
|
283 | 283 | c.parent_tmpl = ''.join( |
|
284 | 284 | '# Parent %s\n' % x.raw_id for x in c.commit.parents) |
|
285 | 285 | if method == 'download': |
|
286 | 286 | response.content_type = 'text/plain' |
|
287 | 287 | response.content_disposition = ( |
|
288 | 288 | 'attachment; filename=%s.diff' % commit_id_range[:12]) |
|
289 | 289 | return diff |
|
290 | 290 | elif method == 'patch': |
|
291 | 291 | response.content_type = 'text/plain' |
|
292 | 292 | c.diff = safe_unicode(diff) |
|
293 | 293 | return render('changeset/patch_changeset.mako') |
|
294 | 294 | elif method == 'raw': |
|
295 | 295 | response.content_type = 'text/plain' |
|
296 | 296 | return diff |
|
297 | 297 | elif method == 'show': |
|
298 | 298 | if len(c.commit_ranges) == 1: |
|
299 | 299 | return render('changeset/changeset.mako') |
|
300 | 300 | else: |
|
301 | 301 | c.ancestor = None |
|
302 | 302 | c.target_repo = c.rhodecode_db_repo |
|
303 | 303 | return render('changeset/changeset_range.mako') |
|
304 | 304 | |
|
305 | 305 | @LoginRequired() |
|
306 | 306 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
307 | 307 | 'repository.admin') |
|
308 | 308 | def index(self, revision, method='show'): |
|
309 | 309 | return self._index(revision, method=method) |
|
310 | 310 | |
|
311 | 311 | @LoginRequired() |
|
312 | 312 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
313 | 313 | 'repository.admin') |
|
314 | 314 | def changeset_raw(self, revision): |
|
315 | 315 | return self._index(revision, method='raw') |
|
316 | 316 | |
|
317 | 317 | @LoginRequired() |
|
318 | 318 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
319 | 319 | 'repository.admin') |
|
320 | 320 | def changeset_patch(self, revision): |
|
321 | 321 | return self._index(revision, method='patch') |
|
322 | 322 | |
|
323 | 323 | @LoginRequired() |
|
324 | 324 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
325 | 325 | 'repository.admin') |
|
326 | 326 | def changeset_download(self, revision): |
|
327 | 327 | return self._index(revision, method='download') |
|
328 | 328 | |
|
329 | 329 | @LoginRequired() |
|
330 | 330 | @NotAnonymous() |
|
331 | 331 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
332 | 332 | 'repository.admin') |
|
333 | 333 | @auth.CSRFRequired() |
|
334 | 334 | @jsonify |
|
335 | 335 | def comment(self, repo_name, revision): |
|
336 | 336 | commit_id = revision |
|
337 | 337 | status = request.POST.get('changeset_status', None) |
|
338 | 338 | text = request.POST.get('text') |
|
339 | 339 | comment_type = request.POST.get('comment_type') |
|
340 | 340 | resolves_comment_id = request.POST.get('resolves_comment_id', None) |
|
341 | 341 | |
|
342 | 342 | if status: |
|
343 | 343 | text = text or (_('Status change %(transition_icon)s %(status)s') |
|
344 | 344 | % {'transition_icon': '>', |
|
345 | 345 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
346 | 346 | |
|
347 | 347 | multi_commit_ids = [] |
|
348 | 348 | for _commit_id in request.POST.get('commit_ids', '').split(','): |
|
349 | 349 | if _commit_id not in ['', None, EmptyCommit.raw_id]: |
|
350 | 350 | if _commit_id not in multi_commit_ids: |
|
351 | 351 | multi_commit_ids.append(_commit_id) |
|
352 | 352 | |
|
353 | 353 | commit_ids = multi_commit_ids or [commit_id] |
|
354 | 354 | |
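
The commit_ids resolution above takes a comma-separated POST field, drops empties and the EmptyCommit sentinel, de-duplicates while preserving order, and falls back to the single commit from the URL. A standalone sketch, under the assumption (suggested by the '0' * 40 check in _get_file_node further down) that EmptyCommit.raw_id is forty zeros:

    EMPTY_COMMIT_ID = '0' * 40  # assumed sentinel for EmptyCommit.raw_id

    def resolve_commit_ids(posted, url_commit_id):
        seen, result = set(), []
        for cid in (posted or '').split(','):
            if cid in ('', EMPTY_COMMIT_ID) or cid in seen:
                continue  # skip blanks, the sentinel, and duplicates
            seen.add(cid)
            result.append(cid)
        return result or [url_commit_id]

    assert resolve_commit_ids('a,b,a,', 'x') == ['a', 'b']
    assert resolve_commit_ids('', 'x') == ['x']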
|
355 | 355 | comment = None |
|
356 | 356 | for current_id in filter(None, commit_ids): |
|
357 | 357 | c.co = comment = CommentsModel().create( |
|
358 | 358 | text=text, |
|
359 | 359 | repo=c.rhodecode_db_repo.repo_id, |
|
360 | 360 | user=c.rhodecode_user.user_id, |
|
361 | 361 | commit_id=current_id, |
|
362 | 362 | f_path=request.POST.get('f_path'), |
|
363 | 363 | line_no=request.POST.get('line'), |
|
364 | 364 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
365 | 365 | if status else None), |
|
366 | 366 | status_change_type=status, |
|
367 | 367 | comment_type=comment_type, |
|
368 | 368 | resolves_comment_id=resolves_comment_id |
|
369 | 369 | ) |
|
370 | 370 | |
|
371 | 371 | # get status if set ! |
|
372 | 372 | if status: |
|
373 | 373 | # if the latest status was from a pull request and it is closed 

374 | 374 | # disallow changing the status! 
|
375 | 375 | # dont_allow_on_closed_pull_request = True ! |
|
376 | 376 | |
|
377 | 377 | try: |
|
378 | 378 | ChangesetStatusModel().set_status( |
|
379 | 379 | c.rhodecode_db_repo.repo_id, |
|
380 | 380 | status, |
|
381 | 381 | c.rhodecode_user.user_id, |
|
382 | 382 | comment, |
|
383 | 383 | revision=current_id, |
|
384 | 384 | dont_allow_on_closed_pull_request=True |
|
385 | 385 | ) |
|
386 | 386 | except StatusChangeOnClosedPullRequestError: |
|
387 | 387 | msg = _('Changing the status of a commit associated with ' |
|
388 | 388 | 'a closed pull request is not allowed') |
|
389 | 389 | log.exception(msg) |
|
390 | 390 | h.flash(msg, category='warning') |
|
391 | 391 | return redirect(h.url( |
|
392 | 392 | 'changeset_home', repo_name=repo_name, |
|
393 | 393 | revision=current_id)) |
|
394 | 394 | |
|
395 | 395 | # finalize, commit and redirect |
|
396 | 396 | Session().commit() |
|
397 | 397 | |
|
398 | 398 | data = { |
|
399 | 399 | 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))), |
|
400 | 400 | } |
|
401 | 401 | if comment: |
|
402 | 402 | data.update(comment.get_dict()) |
|
403 | 403 | data.update({'rendered_text': |
|
404 | 404 | render('changeset/changeset_comment_block.mako')}) |
|
405 | 405 | |
|
406 | 406 | return data |
|
407 | 407 | |
|
408 | 408 | @LoginRequired() |
|
409 | 409 | @NotAnonymous() |
|
410 | 410 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
411 | 411 | 'repository.admin') |
|
412 | 412 | @auth.CSRFRequired() |
|
413 | 413 | def preview_comment(self): |
|
414 | 414 | # Technically a CSRF token is not needed as no state changes with this |
|
415 | 415 | # call. However, as this is a POST it is better to have it, so automated 

416 | 416 | # tools don't flag it as potential CSRF. 

417 | 417 | # POST is required because the payload could be bigger than the maximum 
|
418 | 418 | # allowed by GET. |
|
419 | 419 | if not request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
420 | 420 | raise HTTPBadRequest() |
|
421 | 421 | text = request.POST.get('text') |
|
422 | 422 | renderer = request.POST.get('renderer') or 'rst' |
|
423 | 423 | if text: |
|
424 | 424 | return h.render(text, renderer=renderer, mentions=True) |
|
425 | 425 | return '' |
|
426 | 426 | |
|
427 | 427 | @LoginRequired() |
|
428 | 428 | @NotAnonymous() |
|
429 | 429 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
430 | 430 | 'repository.admin') |
|
431 | 431 | @auth.CSRFRequired() |
|
432 | 432 | @jsonify |
|
433 | 433 | def delete_comment(self, repo_name, comment_id): |
|
434 | 434 | comment = ChangesetComment.get(comment_id) |
|
435 | 435 | if not comment: |
|
436 | 436 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
437 | 437 | # comment was probably already deleted in another call 
|
438 | 438 | return True |
|
439 | 439 | |
|
440 | 440 | owner = (comment.author.user_id == c.rhodecode_user.user_id) |
|
441 | 441 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name) |
|
442 | 442 | if h.HasPermissionAny('hg.admin')() or is_repo_admin or owner: |
|
443 | CommentsModel().delete(comment=comment) | |
|
443 | CommentsModel().delete(comment=comment, user=c.rhodecode_user) | |
|
444 | 444 | Session().commit() |
|
445 | 445 | return True |
|
446 | 446 | else: |
|
447 | 447 | raise HTTPForbidden() |
|
448 | 448 | |
|
449 | 449 | @LoginRequired() |
|
450 | 450 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
451 | 451 | 'repository.admin') |
|
452 | 452 | @jsonify |
|
453 | 453 | def changeset_info(self, repo_name, revision): |
|
454 | 454 | if request.is_xhr: |
|
455 | 455 | try: |
|
456 | 456 | return c.rhodecode_repo.get_commit(commit_id=revision) |
|
457 | 457 | except CommitDoesNotExistError as e: |
|
458 | 458 | return EmptyCommit(message=str(e)) |
|
459 | 459 | else: |
|
460 | 460 | raise HTTPBadRequest() |
|
461 | 461 | |
|
462 | 462 | @LoginRequired() |
|
463 | 463 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
464 | 464 | 'repository.admin') |
|
465 | 465 | @jsonify |
|
466 | 466 | def changeset_children(self, repo_name, revision): |
|
467 | 467 | if request.is_xhr: |
|
468 | 468 | commit = c.rhodecode_repo.get_commit(commit_id=revision) |
|
469 | 469 | result = {"results": commit.children} |
|
470 | 470 | return result |
|
471 | 471 | else: |
|
472 | 472 | raise HTTPBadRequest() |
|
473 | 473 | |
|
474 | 474 | @LoginRequired() |
|
475 | 475 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
476 | 476 | 'repository.admin') |
|
477 | 477 | @jsonify |
|
478 | 478 | def changeset_parents(self, repo_name, revision): |
|
479 | 479 | if request.is_xhr: |
|
480 | 480 | commit = c.rhodecode_repo.get_commit(commit_id=revision) |
|
481 | 481 | result = {"results": commit.parents} |
|
482 | 482 | return result |
|
483 | 483 | else: |
|
484 | 484 | raise HTTPBadRequest() |
@@ -1,1110 +1,1110 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Files controller for RhodeCode Enterprise |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import itertools |
|
26 | 26 | import logging |
|
27 | 27 | import os |
|
28 | 28 | import shutil |
|
29 | 29 | import tempfile |
|
30 | 30 | |
|
31 | 31 | from pylons import request, response, tmpl_context as c, url |
|
32 | 32 | from pylons.i18n.translation import _ |
|
33 | 33 | from pylons.controllers.util import redirect |
|
34 | 34 | from webob.exc import HTTPNotFound, HTTPBadRequest |
|
35 | 35 | |
|
36 | 36 | from rhodecode.controllers.utils import parse_path_ref |
|
37 | 37 | from rhodecode.lib import diffs, helpers as h, caches |
|
38 | 38 | from rhodecode.lib import audit_logger |
|
39 | 39 | from rhodecode.lib.codeblocks import ( |
|
40 | 40 | filenode_as_lines_tokens, filenode_as_annotated_lines_tokens) |
|

41 from rhodecode.lib.utils import jsonify | 

41 | from rhodecode.lib.utils import jsonify | |
|
42 | 42 | from rhodecode.lib.utils2 import ( |
|
43 | 43 | convert_line_endings, detect_mode, safe_str, str2bool) |
|
44 | 44 | from rhodecode.lib.auth import ( |
|
45 | 45 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired, XHRRequired) |
|
46 | 46 | from rhodecode.lib.base import BaseRepoController, render |
|
47 | 47 | from rhodecode.lib.vcs import path as vcspath |
|
48 | 48 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
49 | 49 | from rhodecode.lib.vcs.conf import settings |
|
50 | 50 | from rhodecode.lib.vcs.exceptions import ( |
|
51 | 51 | RepositoryError, CommitDoesNotExistError, EmptyRepositoryError, |
|
52 | 52 | ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError, |
|
53 | 53 | NodeDoesNotExistError, CommitError, NodeError) |
|
54 | 54 | from rhodecode.lib.vcs.nodes import FileNode |
|
55 | 55 | |
|
56 | 56 | from rhodecode.model.repo import RepoModel |
|
57 | 57 | from rhodecode.model.scm import ScmModel |
|
58 | 58 | from rhodecode.model.db import Repository |
|
59 | 59 | |
|
60 | 60 | from rhodecode.controllers.changeset import ( |
|
61 | 61 | _ignorews_url, _context_url, get_line_ctx, get_ignore_ws) |
|
62 | 62 | from rhodecode.lib.exceptions import NonRelativePathError |
|
63 | 63 | |
|
64 | 64 | log = logging.getLogger(__name__) |
|
65 | 65 | |
|
66 | 66 | |
|
67 | 67 | class FilesController(BaseRepoController): |
|
68 | 68 | |
|
69 | 69 | def __before__(self): |
|
70 | 70 | super(FilesController, self).__before__() |
|
71 | 71 | c.cut_off_limit = self.cut_off_limit_file |
|
72 | 72 | |
|
73 | 73 | def _get_default_encoding(self): |
|
74 | 74 | enc_list = getattr(c, 'default_encodings', []) |
|
75 | 75 | return enc_list[0] if enc_list else 'UTF-8' |
|
76 | 76 | |
|
77 | 77 | def __get_commit_or_redirect(self, commit_id, repo_name, |
|
78 | 78 | redirect_after=True): |
|
79 | 79 | """ |
|
80 | 80 | This is a safe way to get commit. If an error occurs it redirects to |
|
81 | 81 | tip with proper message |
|
82 | 82 | |
|
83 | 83 | :param commit_id: id of commit to fetch |
|
84 | 84 | :param repo_name: repo name to redirect after |
|
85 | 85 | :param redirect_after: toggle redirection |
|
86 | 86 | """ |
|
87 | 87 | try: |
|
88 | 88 | return c.rhodecode_repo.get_commit(commit_id) |
|
89 | 89 | except EmptyRepositoryError: |
|
90 | 90 | if not redirect_after: |
|
91 | 91 | return None |
|
92 | 92 | url_ = url('files_add_home', |
|
93 | 93 | repo_name=c.repo_name, |
|
94 | 94 | revision=0, f_path='', anchor='edit') |
|
95 | 95 | if h.HasRepoPermissionAny( |
|
96 | 96 | 'repository.write', 'repository.admin')(c.repo_name): |
|
97 | 97 | add_new = h.link_to( |
|
98 | 98 | _('Click here to add a new file.'), |
|
99 | 99 | url_, class_="alert-link") |
|
100 | 100 | else: |
|
101 | 101 | add_new = "" |
|
102 | 102 | h.flash(h.literal( |
|
103 | 103 | _('There are no files yet. %s') % add_new), category='warning') |
|
104 | 104 | redirect(h.route_path('repo_summary', repo_name=repo_name)) |
|
105 | 105 | except (CommitDoesNotExistError, LookupError): |
|
106 | 106 | msg = _('No such commit exists for this repository') |
|
107 | 107 | h.flash(msg, category='error') |
|
108 | 108 | raise HTTPNotFound() |
|
109 | 109 | except RepositoryError as e: |
|
110 | 110 | h.flash(safe_str(e), category='error') |
|
111 | 111 | raise HTTPNotFound() |
|
112 | 112 | |
|
113 | 113 | def __get_filenode_or_redirect(self, repo_name, commit, path): |
|
114 | 114 | """ |
|
115 | 115 | Returns file_node, if error occurs or given path is directory, |
|
116 | 116 | it'll redirect to top level path |
|
117 | 117 | |
|
118 | 118 | :param repo_name: repo_name |
|
119 | 119 | :param commit: given commit |
|
120 | 120 | :param path: path to lookup |
|
121 | 121 | """ |
|
122 | 122 | try: |
|
123 | 123 | file_node = commit.get_node(path) |
|
124 | 124 | if file_node.is_dir(): |
|
125 | 125 | raise RepositoryError('The given path is a directory') |
|
126 | 126 | except CommitDoesNotExistError: |
|
127 | 127 | msg = _('No such commit exists for this repository') |
|
128 | 128 | log.exception(msg) |
|
129 | 129 | h.flash(msg, category='error') |
|
130 | 130 | raise HTTPNotFound() |
|
131 | 131 | except RepositoryError as e: |
|
132 | 132 | h.flash(safe_str(e), category='error') |
|
133 | 133 | raise HTTPNotFound() |
|
134 | 134 | |
|
135 | 135 | return file_node |
|
136 | 136 | |
|
137 | 137 | def __get_tree_cache_manager(self, repo_name, namespace_type): |
|
138 | 138 | _namespace = caches.get_repo_namespace_key(namespace_type, repo_name) |
|
139 | 139 | return caches.get_cache_manager('repo_cache_long', _namespace) |
|
140 | 140 | |
|
141 | 141 | def _get_tree_at_commit(self, repo_name, commit_id, f_path, |
|
142 | 142 | full_load=False, force=False): |
|
143 | 143 | def _cached_tree(): |
|
144 | 144 | log.debug('Generating cached file tree for %s, %s, %s', |
|
145 | 145 | repo_name, commit_id, f_path) |
|
146 | 146 | c.full_load = full_load |
|
147 | 147 | return render('files/files_browser_tree.mako') |
|
148 | 148 | |
|
149 | 149 | cache_manager = self.__get_tree_cache_manager( |
|
150 | 150 | repo_name, caches.FILE_TREE) |
|
151 | 151 | |
|
152 | 152 | cache_key = caches.compute_key_from_params( |
|
153 | 153 | repo_name, commit_id, f_path) |
|
154 | 154 | |
|
155 | 155 | if force: |
|
156 | 156 | # we want to force recompute of caches |
|
157 | 157 | cache_manager.remove_value(cache_key) |
|
158 | 158 | |
|
159 | 159 | return cache_manager.get(cache_key, createfunc=_cached_tree) |
|
160 | 160 | |
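
The tree rendering above follows a get-or-create caching shape: a namespace per repository, a key derived from (repo_name, commit_id, f_path), a createfunc that only runs on a miss, and force=True evicting the key first. The real code goes through Beaker-style cache managers with derived keys; this dict-backed sketch only mirrors the shape:

    _cache = {}  # toy stand-in for the cache region

    def compute_key(*params):
        # the real compute_key_from_params hashes; a plain join is enough here
        return '|'.join(str(p) for p in params)

    def cached_get(key, createfunc, force=False):
        if force:
            _cache.pop(key, None)  # force recompute, like remove_value()
        if key not in _cache:
            _cache[key] = createfunc()
        return _cache[key]

    key = compute_key('repo', 'abc123', '/docs')
    assert cached_get(key, lambda: '<tree html>') == '<tree html>'
    assert cached_get(key, lambda: 'never called') == '<tree html>'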
|
161 | 161 | def _get_nodelist_at_commit(self, repo_name, commit_id, f_path): |
|
162 | 162 | def _cached_nodes(): |
|
163 | 163 | log.debug('Generating cached nodelist for %s, %s, %s', |
|
164 | 164 | repo_name, commit_id, f_path) |
|
165 | 165 | _d, _f = ScmModel().get_nodes( |
|
166 | 166 | repo_name, commit_id, f_path, flat=False) |
|
167 | 167 | return _d + _f |
|
168 | 168 | |
|
169 | 169 | cache_manager = self.__get_tree_cache_manager( |
|
170 | 170 | repo_name, caches.FILE_SEARCH_TREE_META) |
|
171 | 171 | |
|
172 | 172 | cache_key = caches.compute_key_from_params( |
|
173 | 173 | repo_name, commit_id, f_path) |
|
174 | 174 | return cache_manager.get(cache_key, createfunc=_cached_nodes) |
|
175 | 175 | |
|
176 | 176 | @LoginRequired() |
|
177 | 177 | @HasRepoPermissionAnyDecorator( |
|
178 | 178 | 'repository.read', 'repository.write', 'repository.admin') |
|
179 | 179 | def index( |
|
180 | 180 | self, repo_name, revision, f_path, annotate=False, rendered=False): |
|
181 | 181 | commit_id = revision |
|
182 | 182 | |
|
183 | 183 | # redirect to given commit_id from form if given |
|
184 | 184 | get_commit_id = request.GET.get('at_rev', None) |
|
185 | 185 | if get_commit_id: |
|
186 | 186 | self.__get_commit_or_redirect(get_commit_id, repo_name) |
|
187 | 187 | |
|
188 | 188 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
189 | 189 | c.branch = request.GET.get('branch', None) |
|
190 | 190 | c.f_path = f_path |
|
191 | 191 | c.annotate = annotate |
|
192 | 192 | # default is false, but .rst/.md files are autorendered later; we can 

193 | 193 | # override autorendering by setting this GET flag 
|
194 | 194 | c.renderer = rendered or not request.GET.get('no-render', False) |
|
195 | 195 | |
|
196 | 196 | # prev link |
|
197 | 197 | try: |
|
198 | 198 | prev_commit = c.commit.prev(c.branch) |
|
199 | 199 | c.prev_commit = prev_commit |
|
200 | 200 | c.url_prev = url('files_home', repo_name=c.repo_name, |
|
201 | 201 | revision=prev_commit.raw_id, f_path=f_path) |
|
202 | 202 | if c.branch: |
|
203 | 203 | c.url_prev += '?branch=%s' % c.branch |
|
204 | 204 | except (CommitDoesNotExistError, VCSError): |
|
205 | 205 | c.url_prev = '#' |
|
206 | 206 | c.prev_commit = EmptyCommit() |
|
207 | 207 | |
|
208 | 208 | # next link |
|
209 | 209 | try: |
|
210 | 210 | next_commit = c.commit.next(c.branch) |
|
211 | 211 | c.next_commit = next_commit |
|
212 | 212 | c.url_next = url('files_home', repo_name=c.repo_name, |
|
213 | 213 | revision=next_commit.raw_id, f_path=f_path) |
|
214 | 214 | if c.branch: |
|
215 | 215 | c.url_next += '?branch=%s' % c.branch |
|
216 | 216 | except (CommitDoesNotExistError, VCSError): |
|
217 | 217 | c.url_next = '#' |
|
218 | 218 | c.next_commit = EmptyCommit() |
|
219 | 219 | |
|
220 | 220 | # files or dirs |
|
221 | 221 | try: |
|
222 | 222 | c.file = c.commit.get_node(f_path) |
|
223 | 223 | c.file_author = True |
|
224 | 224 | c.file_tree = '' |
|
225 | 225 | if c.file.is_file(): |
|
226 | 226 | c.lf_node = c.file.get_largefile_node() |
|
227 | 227 | |
|
228 | 228 | c.file_source_page = 'true' |
|
229 | 229 | c.file_last_commit = c.file.last_commit |
|
230 | 230 | if c.file.size < self.cut_off_limit_file: |
|
231 | 231 | if c.annotate: # annotation has precedence over renderer |
|
232 | 232 | c.annotated_lines = filenode_as_annotated_lines_tokens( |
|
233 | 233 | c.file |
|
234 | 234 | ) |
|
235 | 235 | else: |
|
236 | 236 | c.renderer = ( |
|
237 | 237 | c.renderer and h.renderer_from_filename(c.file.path) |
|
238 | 238 | ) |
|
239 | 239 | if not c.renderer: |
|
240 | 240 | c.lines = filenode_as_lines_tokens(c.file) |
|
241 | 241 | |
|
242 | 242 | c.on_branch_head = self._is_valid_head( |
|
243 | 243 | commit_id, c.rhodecode_repo) |
|
244 | 244 | |
|
245 | 245 | branch = c.commit.branch if ( |
|
246 | 246 | c.commit.branch and '/' not in c.commit.branch) else None |
|
247 | 247 | c.branch_or_raw_id = branch or c.commit.raw_id |
|
248 | 248 | c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id) |
|
249 | 249 | |
|
250 | 250 | author = c.file_last_commit.author |
|
251 | 251 | c.authors = [(h.email(author), |
|
252 | 252 | h.person(author, 'username_or_name_or_email'))] |
|
253 | 253 | else: |
|
254 | 254 | c.file_source_page = 'false' |
|
255 | 255 | c.authors = [] |
|
256 | 256 | c.file_tree = self._get_tree_at_commit( |
|
257 | 257 | repo_name, c.commit.raw_id, f_path) |
|
258 | 258 | |
|
259 | 259 | except RepositoryError as e: |
|
260 | 260 | h.flash(safe_str(e), category='error') |
|
261 | 261 | raise HTTPNotFound() |
|
262 | 262 | |
|
263 | 263 | if request.environ.get('HTTP_X_PJAX'): |
|
264 | 264 | return render('files/files_pjax.mako') |
|
265 | 265 | |
|
266 | 266 | return render('files/files.mako') |
|
267 | 267 | |
|
268 | 268 | @LoginRequired() |
|
269 | 269 | @HasRepoPermissionAnyDecorator( |
|
270 | 270 | 'repository.read', 'repository.write', 'repository.admin') |
|
271 | 271 | def annotate_previous(self, repo_name, revision, f_path): |
|
272 | 272 | |
|
273 | 273 | commit_id = revision |
|
274 | 274 | commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
275 | 275 | prev_commit_id = commit.raw_id |
|
276 | 276 | |
|
277 | 277 | f_path = f_path |
|
278 | 278 | is_file = False |
|
279 | 279 | try: |
|
280 | 280 | _file = commit.get_node(f_path) |
|
281 | 281 | is_file = _file.is_file() |
|
282 | 282 | except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError): |
|
283 | 283 | pass |
|
284 | 284 | |
|
285 | 285 | if is_file: |
|
286 | 286 | history = commit.get_file_history(f_path) |
|
287 | 287 | prev_commit_id = history[1].raw_id \ |
|
288 | 288 | if len(history) > 1 else prev_commit_id |
|
289 | 289 | |
|
290 | 290 | return redirect(h.url( |
|
291 | 291 | 'files_annotate_home', repo_name=repo_name, |
|
292 | 292 | revision=prev_commit_id, f_path=f_path)) |
|
293 | 293 | |
|
294 | 294 | @LoginRequired() |
|
295 | 295 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
296 | 296 | 'repository.admin') |
|
297 | 297 | @jsonify |
|
298 | 298 | def history(self, repo_name, revision, f_path): |
|
299 | 299 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
300 | 300 | f_path = f_path |
|
301 | 301 | _file = commit.get_node(f_path) |
|
302 | 302 | if _file.is_file(): |
|
303 | 303 | file_history, _hist = self._get_node_history(commit, f_path) |
|
304 | 304 | |
|
305 | 305 | res = [] |
|
306 | 306 | for obj in file_history: |
|
307 | 307 | res.append({ |
|
308 | 308 | 'text': obj[1], |
|
309 | 309 | 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]] |
|
310 | 310 | }) |
|
311 | 311 | |
|
312 | 312 | data = { |
|
313 | 313 | 'more': False, |
|
314 | 314 | 'results': res |
|
315 | 315 | } |
|
316 | 316 | return data |
|
317 | 317 | |
|
318 | 318 | @LoginRequired() |
|
319 | 319 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
320 | 320 | 'repository.admin') |
|
321 | 321 | def authors(self, repo_name, revision, f_path): |
|
322 | 322 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
323 | 323 | file_node = commit.get_node(f_path) |
|
324 | 324 | if file_node.is_file(): |
|
325 | 325 | c.file_last_commit = file_node.last_commit |
|
326 | 326 | if request.GET.get('annotate') == '1': |
|
327 | 327 | # use _hist from annotation if annotation mode is on |
|
328 | 328 | commit_ids = set(x[1] for x in file_node.annotate) |
|
329 | 329 | _hist = ( |
|
330 | 330 | c.rhodecode_repo.get_commit(commit_id) |
|
331 | 331 | for commit_id in commit_ids) |
|
332 | 332 | else: |
|
333 | 333 | _f_history, _hist = self._get_node_history(commit, f_path) |
|
334 | 334 | c.file_author = False |
|
335 | 335 | c.authors = [] |
|
336 | 336 | for author in set(commit.author for commit in _hist): |
|
337 | 337 | c.authors.append(( |
|
338 | 338 | h.email(author), |
|
339 | 339 | h.person(author, 'username_or_name_or_email'))) |
|
340 | 340 | return render('files/file_authors_box.mako') |
|
341 | 341 | |
|
342 | 342 | @LoginRequired() |
|
343 | 343 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
344 | 344 | 'repository.admin') |
|
345 | 345 | def rawfile(self, repo_name, revision, f_path): |
|
346 | 346 | """ |
|
347 | 347 | Action for download as raw |
|
348 | 348 | """ |
|
349 | 349 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
350 | 350 | file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path) |
|
351 | 351 | |
|
352 | 352 | if request.GET.get('lf'): |
|
353 | 353 | # only if the 'lf' GET flag is passed do we download this file 
|
354 | 354 | # as LFS/Largefile |
|
355 | 355 | lf_node = file_node.get_largefile_node() |
|
356 | 356 | if lf_node: |
|
357 | 357 | # overwrite our pointer with the REAL large-file |
|
358 | 358 | file_node = lf_node |
|
359 | 359 | |
|
360 | 360 | response.content_disposition = 'attachment; filename=%s' % \ |
|
361 | 361 | safe_str(f_path.split(Repository.NAME_SEP)[-1]) |
|
362 | 362 | |
|
363 | 363 | response.content_type = file_node.mimetype |
|
364 | 364 | charset = self._get_default_encoding() |
|
365 | 365 | if charset: |
|
366 | 366 | response.charset = charset |
|
367 | 367 | |
|
368 | 368 | return file_node.content |
|
369 | 369 | |
|
370 | 370 | @LoginRequired() |
|
371 | 371 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
372 | 372 | 'repository.admin') |
|
373 | 373 | def raw(self, repo_name, revision, f_path): |
|
374 | 374 | """ |
|
375 | 375 | Action for show as raw; some mimetypes are "rendered", 

376 | 376 | those include images and icons. 
|
377 | 377 | """ |
|
378 | 378 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
379 | 379 | file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path) |
|
380 | 380 | |
|
381 | 381 | raw_mimetype_mapping = { |
|
382 | 382 | # map original mimetype to a mimetype used for "show as raw" |
|
383 | 383 | # you can also provide a content-disposition to override the |
|
384 | 384 | # default "attachment" disposition. |
|
385 | 385 | # orig_type: (new_type, new_dispo) |
|
386 | 386 | |
|
387 | 387 | # show images inline: |
|
388 | 388 | # Do not re-add SVG: it is unsafe and permits XSS attacks. One can |
|
389 | 389 | # for example render an SVG with javascript inside or even render |
|
390 | 390 | # HTML. |
|
391 | 391 | 'image/x-icon': ('image/x-icon', 'inline'), |
|
392 | 392 | 'image/png': ('image/png', 'inline'), |
|
393 | 393 | 'image/gif': ('image/gif', 'inline'), |
|
394 | 394 | 'image/jpeg': ('image/jpeg', 'inline'), |
|
395 | 395 | 'application/pdf': ('application/pdf', 'inline'), |
|
396 | 396 | } |
|
397 | 397 | |
|
398 | 398 | mimetype = file_node.mimetype |
|
399 | 399 | try: |
|
400 | 400 | mimetype, dispo = raw_mimetype_mapping[mimetype] |
|
401 | 401 | except KeyError: |
|
402 | 402 | # we don't know anything special about this, handle it safely |
|
403 | 403 | if file_node.is_binary: |
|
404 | 404 | # do same as download raw for binary files |
|
405 | 405 | mimetype, dispo = 'application/octet-stream', 'attachment' |
|
406 | 406 | else: |
|
407 | 407 | # do not just use the original mimetype, but force text/plain, |
|
408 | 408 | # otherwise it would serve text/html and that might be unsafe. |
|
409 | 409 | # Note: underlying vcs library fakes text/plain mimetype if the |
|
410 | 410 | # mimetype can not be determined and it thinks it is not |
|
411 | 411 | # binary. This might lead to erroneous text display in some 
|
412 | 412 | # cases, but helps in other cases, like with text files |
|
413 | 413 | # without extension. |
|
414 | 414 | mimetype, dispo = 'text/plain', 'inline' |
|
415 | 415 | |
|
416 | 416 | if dispo == 'attachment': |
|
417 | 417 | dispo = 'attachment; filename=%s' % safe_str( |
|
418 | 418 | f_path.split(os.sep)[-1]) |
|
419 | 419 | |
|
420 | 420 | response.content_disposition = dispo |
|
421 | 421 | response.content_type = mimetype |
|
422 | 422 | charset = self._get_default_encoding() |
|
423 | 423 | if charset: |
|
424 | 424 | response.charset = charset |
|
425 | 425 | return file_node.content |
|
426 | 426 | |
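
The raw view above is a whitelist: only known-safe types are served inline under their own mimetype; unknown binaries become octet-stream attachments, and everything else is forced to text/plain so the browser never executes served HTML or SVG. A compressed sketch of that decision (mapping trimmed for brevity):

    RAW_MIMETYPE_MAPPING = {
        # orig_type: (served_type, disposition) -- inline only for safe types
        'image/png': ('image/png', 'inline'),
        'image/jpeg': ('image/jpeg', 'inline'),
    }

    def resolve_raw_type(mimetype, is_binary):
        try:
            return RAW_MIMETYPE_MAPPING[mimetype]
        except KeyError:
            if is_binary:
                return 'application/octet-stream', 'attachment'
            # never echo the original type: text/html would be unsafe
            return 'text/plain', 'inline'

    assert resolve_raw_type('text/html', False) == ('text/plain', 'inline')
    assert resolve_raw_type('application/zip', True) == (
        'application/octet-stream', 'attachment')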
|
427 | 427 | @CSRFRequired() |
|
428 | 428 | @LoginRequired() |
|
429 | 429 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
430 | 430 | def delete(self, repo_name, revision, f_path): |
|
431 | 431 | commit_id = revision |
|
432 | 432 | |
|
433 | 433 | repo = c.rhodecode_db_repo |
|
434 | 434 | if repo.enable_locking and repo.locked[0]: |
|
435 | 435 | h.flash(_('This repository has been locked by %s on %s') |
|
436 | 436 | % (h.person_by_id(repo.locked[0]), |
|
437 | 437 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
438 | 438 | 'warning') |
|
439 | 439 | return redirect(h.url('files_home', |
|
440 | 440 | repo_name=repo_name, revision='tip')) |
|
441 | 441 | |
|
442 | 442 | if not self._is_valid_head(commit_id, repo.scm_instance()): |
|
443 | 443 | h.flash(_('You can only delete files with revision ' |
|
444 | 444 | 'being a valid branch '), category='warning') |
|
445 | 445 | return redirect(h.url('files_home', |
|
446 | 446 | repo_name=repo_name, revision='tip', |
|
447 | 447 | f_path=f_path)) |
|
448 | 448 | |
|
449 | 449 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
450 | 450 | c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path) |
|
451 | 451 | |
|
452 | 452 | c.default_message = _( |
|
453 | 453 | 'Deleted file %s via RhodeCode Enterprise') % (f_path) |
|
454 | 454 | c.f_path = f_path |
|
455 | 455 | node_path = f_path |
|
456 | 456 | author = c.rhodecode_user.full_contact |
|
457 | 457 | message = request.POST.get('message') or c.default_message |
|
458 | 458 | try: |
|
459 | 459 | nodes = { |
|
460 | 460 | node_path: { |
|
461 | 461 | 'content': '' |
|
462 | 462 | } |
|
463 | 463 | } |
|
464 | 464 | self.scm_model.delete_nodes( |
|
465 | 465 | user=c.rhodecode_user.user_id, repo=c.rhodecode_db_repo, |
|
466 | 466 | message=message, |
|
467 | 467 | nodes=nodes, |
|
468 | 468 | parent_commit=c.commit, |
|
469 | 469 | author=author, |
|
470 | 470 | ) |
|
471 | 471 | |
|
472 | 472 | h.flash(_('Successfully deleted file %s') % f_path, |
|
473 | 473 | category='success') |
|
474 | 474 | except Exception: |
|
475 | 475 | msg = _('Error occurred during commit') |
|
476 | 476 | log.exception(msg) |
|
477 | 477 | h.flash(msg, category='error') |
|
478 | 478 | return redirect(url('changeset_home', |
|
479 | 479 | repo_name=c.repo_name, revision='tip')) |
|
480 | 480 | |
|
481 | 481 | @LoginRequired() |
|
482 | 482 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
483 | 483 | def delete_home(self, repo_name, revision, f_path): |
|
484 | 484 | commit_id = revision |
|
485 | 485 | |
|
486 | 486 | repo = c.rhodecode_db_repo |
|
487 | 487 | if repo.enable_locking and repo.locked[0]: |
|
488 | 488 | h.flash(_('This repository has been locked by %s on %s') |
|
489 | 489 | % (h.person_by_id(repo.locked[0]), |
|
490 | 490 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
491 | 491 | 'warning') |
|
492 | 492 | return redirect(h.url('files_home', |
|
493 | 493 | repo_name=repo_name, revision='tip')) |
|
494 | 494 | |
|
495 | 495 | if not self._is_valid_head(commit_id, repo.scm_instance()): |
|
496 | 496 | h.flash(_('You can only delete files with revision ' |
|
497 | 497 | 'being a valid branch '), category='warning') |
|
498 | 498 | return redirect(h.url('files_home', |
|
499 | 499 | repo_name=repo_name, revision='tip', |
|
500 | 500 | f_path=f_path)) |
|
501 | 501 | |
|
502 | 502 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
503 | 503 | c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path) |
|
504 | 504 | |
|
505 | 505 | c.default_message = _( |
|
506 | 506 | 'Deleted file %s via RhodeCode Enterprise') % (f_path) |
|
507 | 507 | c.f_path = f_path |
|
508 | 508 | |
|
509 | 509 | return render('files/files_delete.mako') |
|
510 | 510 | |
|
511 | 511 | @CSRFRequired() |
|
512 | 512 | @LoginRequired() |
|
513 | 513 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
514 | 514 | def edit(self, repo_name, revision, f_path): |
|
515 | 515 | commit_id = revision |
|
516 | 516 | |
|
517 | 517 | repo = c.rhodecode_db_repo |
|
518 | 518 | if repo.enable_locking and repo.locked[0]: |
|
519 | 519 | h.flash(_('This repository has been locked by %s on %s') |
|
520 | 520 | % (h.person_by_id(repo.locked[0]), |
|
521 | 521 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
522 | 522 | 'warning') |
|
523 | 523 | return redirect(h.url('files_home', |
|
524 | 524 | repo_name=repo_name, revision='tip')) |
|
525 | 525 | |
|
526 | 526 | if not self._is_valid_head(commit_id, repo.scm_instance()): |
|
527 | 527 | h.flash(_('You can only edit files with revision ' |
|
528 | 528 | 'being a valid branch '), category='warning') |
|
529 | 529 | return redirect(h.url('files_home', |
|
530 | 530 | repo_name=repo_name, revision='tip', |
|
531 | 531 | f_path=f_path)) |
|
532 | 532 | |
|
533 | 533 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
534 | 534 | c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path) |
|
535 | 535 | |
|
536 | 536 | if c.file.is_binary: |
|
537 | 537 | return redirect(url('files_home', repo_name=c.repo_name, |
|
538 | 538 | revision=c.commit.raw_id, f_path=f_path)) |
|
539 | 539 | c.default_message = _( |
|
540 | 540 | 'Edited file %s via RhodeCode Enterprise') % (f_path) |
|
541 | 541 | c.f_path = f_path |
|
542 | 542 | old_content = c.file.content |
|
543 | 543 | sl = old_content.splitlines(1) |
|
544 | 544 | first_line = sl[0] if sl else '' |
|
545 | 545 | |
|
546 | 546 | # modes: 0 - Unix, 1 - Mac, 2 - DOS |
|
547 | 547 | mode = detect_mode(first_line, 0) |
|
548 | 548 | content = convert_line_endings(request.POST.get('content', ''), mode) |
|
549 | 549 | |
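
detect_mode and convert_line_endings keep an edit from silently rewriting every line ending: the convention is inferred from the first line of the old content, then the posted text is normalized to match. A rough sketch of plausible implementations (the real helpers live in rhodecode.lib.utils2 and may differ):

    def detect_mode(first_line, default):
        # modes: 0 - Unix (\n), 1 - Mac (\r), 2 - DOS (\r\n)
        if first_line.endswith('\r\n'):
            return 2
        if first_line.endswith('\r'):
            return 1
        if first_line.endswith('\n'):
            return 0
        return default

    def convert_line_endings(text, mode):
        eol = {0: '\n', 1: '\r', 2: '\r\n'}[mode]
        normalized = text.replace('\r\n', '\n').replace('\r', '\n')
        return eol.join(normalized.split('\n'))

    assert detect_mode('first line\r\n', 0) == 2
    assert convert_line_endings('a\nb\n', 2) == 'a\r\nb\r\n'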
|
550 | 550 | message = request.POST.get('message') or c.default_message |
|
551 | 551 | org_f_path = c.file.unicode_path |
|
552 | 552 | filename = request.POST['filename'] |
|
553 | 553 | org_filename = c.file.name |
|
554 | 554 | |
|
555 | 555 | if content == old_content and filename == org_filename: |
|
556 | 556 | h.flash(_('No changes'), category='warning') |
|
557 | 557 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
558 | 558 | revision='tip')) |
|
559 | 559 | try: |
|
560 | 560 | mapping = { |
|
561 | 561 | org_f_path: { |
|
562 | 562 | 'org_filename': org_f_path, |
|
563 | 563 | 'filename': os.path.join(c.file.dir_path, filename), |
|
564 | 564 | 'content': content, |
|
565 | 565 | 'lexer': '', |
|
566 | 566 | 'op': 'mod', |
|
567 | 567 | } |
|
568 | 568 | } |
|
569 | 569 | |
|
570 | 570 | ScmModel().update_nodes( |
|
571 | 571 | user=c.rhodecode_user.user_id, |
|
572 | 572 | repo=c.rhodecode_db_repo, |
|
573 | 573 | message=message, |
|
574 | 574 | nodes=mapping, |
|
575 | 575 | parent_commit=c.commit, |
|
576 | 576 | ) |
|
577 | 577 | |
|
578 | 578 | h.flash(_('Successfully committed to %s') % f_path, |
|
579 | 579 | category='success') |
|
580 | 580 | except Exception: |
|
581 | 581 | msg = _('Error occurred during commit') |
|
582 | 582 | log.exception(msg) |
|
583 | 583 | h.flash(msg, category='error') |
|
584 | 584 | return redirect(url('changeset_home', |
|
585 | 585 | repo_name=c.repo_name, revision='tip')) |
|
586 | 586 | |
|
587 | 587 | @LoginRequired() |
|
588 | 588 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
589 | 589 | def edit_home(self, repo_name, revision, f_path): |
|
590 | 590 | commit_id = revision |
|
591 | 591 | |
|
592 | 592 | repo = c.rhodecode_db_repo |
|
593 | 593 | if repo.enable_locking and repo.locked[0]: |
|
594 | 594 | h.flash(_('This repository has been locked by %s on %s') |
|
595 | 595 | % (h.person_by_id(repo.locked[0]), |
|
596 | 596 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
597 | 597 | 'warning') |
|
598 | 598 | return redirect(h.url('files_home', |
|
599 | 599 | repo_name=repo_name, revision='tip')) |
|
600 | 600 | |
|
601 | 601 | if not self._is_valid_head(commit_id, repo.scm_instance()): |
|
602 | 602 | h.flash(_('You can only edit files with revision ' |
|
603 | 603 | 'being a valid branch '), category='warning') |
|
604 | 604 | return redirect(h.url('files_home', |
|
605 | 605 | repo_name=repo_name, revision='tip', |
|
606 | 606 | f_path=f_path)) |
|
607 | 607 | |
|
608 | 608 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
609 | 609 | c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path) |
|
610 | 610 | |
|
611 | 611 | if c.file.is_binary: |
|
612 | 612 | return redirect(url('files_home', repo_name=c.repo_name, |
|
613 | 613 | revision=c.commit.raw_id, f_path=f_path)) |
|
614 | 614 | c.default_message = _( |
|
615 | 615 | 'Edited file %s via RhodeCode Enterprise') % (f_path) |
|
616 | 616 | c.f_path = f_path |
|
617 | 617 | |
|
618 | 618 | return render('files/files_edit.mako') |
|
619 | 619 | |
|
620 | 620 | def _is_valid_head(self, commit_id, repo): |
|
621 | 621 | # check if commit is a branch identifier; basically we cannot 
|
622 | 622 | # create multiple heads via file editing |
|
623 | 623 | valid_heads = repo.branches.keys() + repo.branches.values() |
|
624 | 624 | |
|
625 | 625 | if h.is_svn(repo) and not repo.is_empty(): |
|
626 | 626 | # Note: Subversion only has one head, we add it here in case there |
|
627 | 627 | # is no branch matched. |
|
628 | 628 | valid_heads.append(repo.get_commit(commit_idx=-1).raw_id) |
|
629 | 629 | |
|
630 | 630 | # check if commit is a branch name or branch hash |
|
631 | 631 | return commit_id in valid_heads |
|
632 | 632 | |
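
_is_valid_head accepts either side of the branches mapping, so a revision passes whether it is given as a branch name or as that branch's head hash; anything else (an older commit, a tag) is rejected to avoid growing new heads through the file editor. The check in miniature:

    def is_valid_head(commit_id, branches):
        # branches maps name -> head commit id; accept either form
        valid_heads = list(branches.keys()) + list(branches.values())
        return commit_id in valid_heads

    assert is_valid_head('default', {'default': 'abc123'})
    assert is_valid_head('abc123', {'default': 'abc123'})
    assert not is_valid_head('old456', {'default': 'abc123'})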
|
633 | 633 | @CSRFRequired() |
|
634 | 634 | @LoginRequired() |
|
635 | 635 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
636 | 636 | def add(self, repo_name, revision, f_path): |
|
637 | 637 | repo = Repository.get_by_repo_name(repo_name) |
|
638 | 638 | if repo.enable_locking and repo.locked[0]: |
|
639 | 639 | h.flash(_('This repository has been locked by %s on %s') |
|
640 | 640 | % (h.person_by_id(repo.locked[0]), |
|
641 | 641 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
642 | 642 | 'warning') |
|
643 | 643 | return redirect(h.url('files_home', |
|
644 | 644 | repo_name=repo_name, revision='tip')) |
|
645 | 645 | |
|
646 | 646 | r_post = request.POST |
|
647 | 647 | |
|
648 | 648 | c.commit = self.__get_commit_or_redirect( |
|
649 | 649 | revision, repo_name, redirect_after=False) |
|
650 | 650 | if c.commit is None: |
|
651 | 651 | c.commit = EmptyCommit(alias=c.rhodecode_repo.alias) |
|
652 | 652 | c.default_message = (_('Added file via RhodeCode Enterprise')) |
|
653 | 653 | c.f_path = f_path |
|
654 | 654 | unix_mode = 0 |
|
655 | 655 | content = convert_line_endings(r_post.get('content', ''), unix_mode) |
|
656 | 656 | |
|
657 | 657 | message = r_post.get('message') or c.default_message |
|
658 | 658 | filename = r_post.get('filename') |
|
659 | 659 | location = r_post.get('location', '') # dir location |
|
660 | 660 | file_obj = r_post.get('upload_file', None) |
|
661 | 661 | |
|
662 | 662 | if file_obj is not None and hasattr(file_obj, 'filename'): |
|
663 | 663 | filename = r_post.get('filename_upload') |
|
664 | 664 | content = file_obj.file |
|
665 | 665 | |
|
666 | 666 | if hasattr(content, 'file'): |
|
667 | 667 | # non-posix systems store the real file under the file attr 
|
668 | 668 | content = content.file |
|
669 | 669 | |
|
670 | 670 | # If there's no commit, redirect to repo summary |
|
671 | 671 | if type(c.commit) is EmptyCommit: |
|
672 | 672 | redirect_url = h.route_path('repo_summary', repo_name=c.repo_name) |
|
673 | 673 | else: |
|
674 | 674 | redirect_url = url("changeset_home", repo_name=c.repo_name, |
|
675 | 675 | revision='tip') |
|
676 | 676 | |
|
677 | 677 | if not filename: |
|
678 | 678 | h.flash(_('No filename'), category='warning') |
|
679 | 679 | return redirect(redirect_url) |
|
680 | 680 | |
|
681 | 681 | # extract the location from filename, |
|
682 | 682 | # allows using foo/bar.txt syntax to create subdirectories |
|
683 | 683 | subdir_loc = filename.rsplit('/', 1) |
|
684 | 684 | if len(subdir_loc) == 2: |
|
685 | 685 | location = os.path.join(location, subdir_loc[0]) |
|
686 | 686 | |
|
687 | 687 | # strip all crap out of file, just leave the basename |
|
688 | 688 | filename = os.path.basename(filename) |
|
689 | 689 | node_path = os.path.join(location, filename) |
|
690 | 690 | author = c.rhodecode_user.full_contact |
|
691 | 691 | |
|
692 | 692 | try: |
|
693 | 693 | nodes = { |
|
694 | 694 | node_path: { |
|
695 | 695 | 'content': content |
|
696 | 696 | } |
|
697 | 697 | } |
|
698 | 698 | self.scm_model.create_nodes( |
|
699 | 699 | user=c.rhodecode_user.user_id, |
|
700 | 700 | repo=c.rhodecode_db_repo, |
|
701 | 701 | message=message, |
|
702 | 702 | nodes=nodes, |
|
703 | 703 | parent_commit=c.commit, |
|
704 | 704 | author=author, |
|
705 | 705 | ) |
|
706 | 706 | |
|
707 | 707 | h.flash(_('Successfully committed to %s') % node_path, |
|
708 | 708 | category='success') |
|
709 | 709 | except NonRelativePathError as e: |
|
710 | 710 | h.flash(_( |
|
711 | 711 | 'The location specified must be a relative path and must not ' |
|
712 | 712 | 'contain .. in the path'), category='warning') |
|
713 | 713 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
714 | 714 | revision='tip')) |
|
715 | 715 | except (NodeError, NodeAlreadyExistsError) as e: |
|
716 | 716 | h.flash(_(e), category='error') |
|
717 | 717 | except Exception: |
|
718 | 718 | msg = _('Error occurred during commit') |
|
719 | 719 | log.exception(msg) |
|
720 | 720 | h.flash(msg, category='error') |
|
721 | 721 | return redirect(url('changeset_home', |
|
722 | 722 | repo_name=c.repo_name, revision='tip')) |
|
723 | 723 | |
|
724 | 724 | @LoginRequired() |
|
725 | 725 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
726 | 726 | def add_home(self, repo_name, revision, f_path): |
|
727 | 727 | |
|
728 | 728 | repo = Repository.get_by_repo_name(repo_name) |
|
729 | 729 | if repo.enable_locking and repo.locked[0]: |
|
730 | 730 | h.flash(_('This repository has been locked by %s on %s') |
|
731 | 731 | % (h.person_by_id(repo.locked[0]), |
|
732 | 732 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
733 | 733 | 'warning') |
|
734 | 734 | return redirect(h.url('files_home', |
|
735 | 735 | repo_name=repo_name, revision='tip')) |
|
736 | 736 | |
|
737 | 737 | c.commit = self.__get_commit_or_redirect( |
|
738 | 738 | revision, repo_name, redirect_after=False) |
|
739 | 739 | if c.commit is None: |
|
740 | 740 | c.commit = EmptyCommit(alias=c.rhodecode_repo.alias) |
|
741 | 741 | c.default_message = (_('Added file via RhodeCode Enterprise')) |
|
742 | 742 | c.f_path = f_path |
|
743 | 743 | |
|
744 | 744 | return render('files/files_add.mako') |
|
745 | 745 | |
|
746 | 746 | @LoginRequired() |
|
747 | 747 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
748 | 748 | 'repository.admin') |
|
749 | 749 | def archivefile(self, repo_name, fname): |
|
750 | 750 | fileformat = None |
|
751 | 751 | commit_id = None |
|
752 | 752 | ext = None |
|
753 | 753 | subrepos = request.GET.get('subrepos') == 'true' |
|
754 | 754 | |
|
755 | 755 | for a_type, ext_data in settings.ARCHIVE_SPECS.items(): |
|
756 | 756 | archive_spec = fname.split(ext_data[1]) |
|
757 | 757 | if len(archive_spec) == 2 and archive_spec[1] == '': |
|
758 | 758 | fileformat = a_type or ext_data[1] |
|
759 | 759 | commit_id = archive_spec[0] |
|
760 | 760 | ext = ext_data[1] |
|
761 | 761 | |
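
The loop above recovers the commit id and archive type from a name like 'abc123.tar.gz' by splitting on each known extension; a split yielding ['<commit>', ''] means the name ends in exactly that extension. A sketch with a simplified spec table (the real ARCHIVE_SPECS also carries the content type):

    ARCHIVE_EXTS = {'tbz2': '.tar.bz2', 'tgz': '.tar.gz', 'zip': '.zip'}

    def parse_archive_fname(fname):
        for a_type, ext in ARCHIVE_EXTS.items():
            spec = fname.split(ext)
            if len(spec) == 2 and spec[1] == '':
                return spec[0], a_type, ext
        return None, None, None

    assert parse_archive_fname('abc123.tar.gz') == ('abc123', 'tgz', '.tar.gz')
    assert parse_archive_fname('abc123.rar') == (None, None, None)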
|
762 | 762 | dbrepo = RepoModel().get_by_repo_name(repo_name) |
|
763 | 763 | if not dbrepo.enable_downloads: |
|
764 | 764 | return _('Downloads disabled') |
|
765 | 765 | |
|
766 | 766 | try: |
|
767 | 767 | commit = c.rhodecode_repo.get_commit(commit_id) |
|
768 | 768 | content_type = settings.ARCHIVE_SPECS[fileformat][0] |
|
769 | 769 | except CommitDoesNotExistError: |
|
770 | 770 | return _('Unknown revision %s') % commit_id |
|
771 | 771 | except EmptyRepositoryError: |
|
772 | 772 | return _('Empty repository') |
|
773 | 773 | except KeyError: |
|
774 | 774 | return _('Unknown archive type') |
|
775 | 775 | |
|
776 | 776 | # archive cache |
|
777 | 777 | from rhodecode import CONFIG |
|
778 | 778 | |
|
779 | 779 | archive_name = '%s-%s%s%s' % ( |
|
780 | 780 | safe_str(repo_name.replace('/', '_')), |
|
781 | 781 | '-sub' if subrepos else '', |
|
782 | 782 | safe_str(commit.short_id), ext) |
|
783 | 783 | |
|
784 | 784 | use_cached_archive = False |
|
785 | 785 | archive_cache_enabled = CONFIG.get( |
|
786 | 786 | 'archive_cache_dir') and not request.GET.get('no_cache') |
|
787 | 787 | |
|
788 | 788 | if archive_cache_enabled: |
|
789 | 789 | # check if it's ok to write 
|
790 | 790 | if not os.path.isdir(CONFIG['archive_cache_dir']): |
|
791 | 791 | os.makedirs(CONFIG['archive_cache_dir']) |
|
792 | 792 | cached_archive_path = os.path.join( |
|
793 | 793 | CONFIG['archive_cache_dir'], archive_name) |
|
794 | 794 | if os.path.isfile(cached_archive_path): |
|
795 | 795 | log.debug('Found cached archive in %s', cached_archive_path) |
|
796 | 796 | fd, archive = None, cached_archive_path |
|
797 | 797 | use_cached_archive = True |
|
798 | 798 | else: |
|
799 | 799 | log.debug('Archive %s is not yet cached', archive_name) |
|
800 | 800 | |
|
801 | 801 | if not use_cached_archive: |
|
802 | 802 | # generate new archive |
|
803 | 803 | fd, archive = tempfile.mkstemp() |
|
804 | 804 | log.debug('Creating new temp archive in %s' % (archive,)) |
|
805 | 805 | try: |
|
806 | 806 | commit.archive_repo(archive, kind=fileformat, subrepos=subrepos) |
|
807 | 807 | except ImproperArchiveTypeError: |
|
808 | 808 | return _('Unknown archive type') |
|
809 | 809 | if archive_cache_enabled: |
|
810 | 810 | # if we generated the archive and we have cache enabled |
|
811 | 811 | # let's keep it for future use 
|
812 | 812 | log.debug('Storing new archive in %s' % (cached_archive_path,)) |
|
813 | 813 | shutil.move(archive, cached_archive_path) |
|
814 | 814 | archive = cached_archive_path |
|
815 | 815 | |
|
816 | 816 | # store download action |
|
817 | 817 | audit_logger.store_web( |
|
818 | 818 | action='repo.archive.download', |
|
819 | 819 | action_data={'user_agent': request.user_agent, |
|
820 | 820 | 'archive_name': archive_name, |
|
821 | 821 | 'archive_spec': fname, |
|
822 | 822 | 'archive_cached': use_cached_archive}, |
|
823 | 823 | user=c.rhodecode_user, |
|
824 | 824 | repo=dbrepo, |
|
825 | 825 | commit=True |
|
826 | 826 | ) |
|
827 | 827 | |
|
828 | 828 | response.content_disposition = str( |
|
829 | 829 | 'attachment; filename=%s' % archive_name) |
|
830 | 830 | response.content_type = str(content_type) |
|
831 | 831 | |
|
832 | 832 | def get_chunked_archive(archive): |
|
833 | 833 | with open(archive, 'rb') as stream: |
|
834 | 834 | while True: |
|
835 | 835 | data = stream.read(16 * 1024) |
|
836 | 836 | if not data: |
|
837 | 837 | if fd: # fd means we used temporary file |
|
838 | 838 | os.close(fd) |
|
839 | 839 | if not archive_cache_enabled: |
|
840 | 840 | log.debug('Destroying temp archive %s', archive) |
|
841 | 841 | os.remove(archive) |
|
842 | 842 | break |
|
843 | 843 | yield data |
|
844 | 844 | |
|
845 | 845 | return get_chunked_archive(archive) |
|
846 | 846 | |
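
Returning get_chunked_archive(archive) hands the WSGI layer a generator, so the archive is streamed in 16 KB chunks instead of being read into memory, and cleanup (closing the temp fd, deleting an uncached temp file) happens only once the stream is exhausted. A standalone sketch of the same streaming-then-cleanup pattern:

    import os

    def chunked_file(path, fd=None, delete_after=False, chunk_size=16 * 1024):
        with open(path, 'rb') as stream:
            while True:
                data = stream.read(chunk_size)
                if not data:
                    break
                yield data
        # runs only after the consumer has drained the generator
        if fd is not None:
            os.close(fd)
        if delete_after:
            os.remove(path)

    # usage: hand the iterator to the response as its body, e.g.
    # body = chunked_file('/tmp/archive.tar.gz', fd=tmp_fd, delete_after=True)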
|
847 | 847 | @LoginRequired() |
|
848 | 848 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
849 | 849 | 'repository.admin') |
|
850 | 850 | def diff(self, repo_name, f_path): |
|
851 | 851 | |
|
852 | 852 | c.action = request.GET.get('diff') |
|
853 | 853 | diff1 = request.GET.get('diff1', '') |
|
854 | 854 | diff2 = request.GET.get('diff2', '') |
|
855 | 855 | |
|
856 | 856 | path1, diff1 = parse_path_ref(diff1, default_path=f_path) |
|
857 | 857 | |
|
858 | 858 | ignore_whitespace = str2bool(request.GET.get('ignorews')) |
|
859 | 859 | line_context = request.GET.get('context', 3) |
|
860 | 860 | |
|
861 | 861 | if not any((diff1, diff2)): |
|
862 | 862 | h.flash( |
|
863 | 863 | 'Need query parameter "diff1" or "diff2" to generate a diff.', |
|
864 | 864 | category='error') |
|
865 | 865 | raise HTTPBadRequest() |
|
866 | 866 | |
|
867 | 867 | if c.action not in ['download', 'raw']: |
|
868 | 868 | # redirect to new view if we render diff |
|
869 | 869 | return redirect( |
|
870 | 870 | url('compare_url', repo_name=repo_name, |
|
871 | 871 | source_ref_type='rev', |
|
872 | 872 | source_ref=diff1, |
|
873 | 873 | target_repo=c.repo_name, |
|
874 | 874 | target_ref_type='rev', |
|
875 | 875 | target_ref=diff2, |
|
876 | 876 | f_path=f_path)) |
|
877 | 877 | |
|
878 | 878 | try: |
|
879 | 879 | node1 = self._get_file_node(diff1, path1) |
|
880 | 880 | node2 = self._get_file_node(diff2, f_path) |
|
881 | 881 | except (RepositoryError, NodeError): |
|
882 | 882 | log.exception("Exception while trying to get node from repository") |
|
883 | 883 | return redirect(url( |
|
884 | 884 | 'files_home', repo_name=c.repo_name, f_path=f_path)) |
|
885 | 885 | |
|
886 | 886 | if all(isinstance(node.commit, EmptyCommit) |
|
887 | 887 | for node in (node1, node2)): |
|
888 | 888 | raise HTTPNotFound |
|
889 | 889 | |
|
890 | 890 | c.commit_1 = node1.commit |
|
891 | 891 | c.commit_2 = node2.commit |
|
892 | 892 | |
|
893 | 893 | if c.action == 'download': |
|
894 | 894 | _diff = diffs.get_gitdiff(node1, node2, |
|
895 | 895 | ignore_whitespace=ignore_whitespace, |
|
896 | 896 | context=line_context) |
|
897 | 897 | diff = diffs.DiffProcessor(_diff, format='gitdiff') |
|
898 | 898 | |
|
899 | 899 | diff_name = '%s_vs_%s.diff' % (diff1, diff2) |
|
900 | 900 | response.content_type = 'text/plain' |
|
901 | 901 | response.content_disposition = ( |
|
902 | 902 | 'attachment; filename=%s' % (diff_name,) |
|
903 | 903 | ) |
|
904 | 904 | charset = self._get_default_encoding() |
|
905 | 905 | if charset: |
|
906 | 906 | response.charset = charset |
|
907 | 907 | return diff.as_raw() |
|
908 | 908 | |
|
909 | 909 | elif c.action == 'raw': |
|
910 | 910 | _diff = diffs.get_gitdiff(node1, node2, |
|
911 | 911 | ignore_whitespace=ignore_whitespace, |
|
912 | 912 | context=line_context) |
|
913 | 913 | diff = diffs.DiffProcessor(_diff, format='gitdiff') |
|
914 | 914 | response.content_type = 'text/plain' |
|
915 | 915 | charset = self._get_default_encoding() |
|
916 | 916 | if charset: |
|
917 | 917 | response.charset = charset |
|
918 | 918 | return diff.as_raw() |
|
919 | 919 | |
|
920 | 920 | else: |
|
921 | 921 | return redirect( |
|
922 | 922 | url('compare_url', repo_name=repo_name, |
|
923 | 923 | source_ref_type='rev', |
|
924 | 924 | source_ref=diff1, |
|
925 | 925 | target_repo=c.repo_name, |
|
926 | 926 | target_ref_type='rev', |
|
927 | 927 | target_ref=diff2, |
|
928 | 928 | f_path=f_path)) |
|
929 | 929 | |
|
930 | 930 | @LoginRequired() |
|
931 | 931 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
932 | 932 | 'repository.admin') |
|
933 | 933 | def diff_2way(self, repo_name, f_path): |
|
934 | 934 | """ |
|
935 | 935 | Kept only to make OLD links work |
|
936 | 936 | """ |
|
937 | 937 | diff1 = request.GET.get('diff1', '') |
|
938 | 938 | diff2 = request.GET.get('diff2', '') |
|
939 | 939 | |
|
940 | 940 | if not any((diff1, diff2)): |
|
941 | 941 | h.flash( |
|
942 | 942 | 'Need query parameter "diff1" or "diff2" to generate a diff.', |
|
943 | 943 | category='error') |
|
944 | 944 | raise HTTPBadRequest() |
|
945 | 945 | |
|
946 | 946 | return redirect( |
|
947 | 947 | url('compare_url', repo_name=repo_name, |
|
948 | 948 | source_ref_type='rev', |
|
949 | 949 | source_ref=diff1, |
|
950 | 950 | target_repo=c.repo_name, |
|
951 | 951 | target_ref_type='rev', |
|
952 | 952 | target_ref=diff2, |
|
953 | 953 | f_path=f_path, |
|
954 | 954 | diffmode='sideside')) |
|
955 | 955 | |
|
956 | 956 | def _get_file_node(self, commit_id, f_path): |
|
957 | 957 | if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]: |
|
958 | 958 | commit = c.rhodecode_repo.get_commit(commit_id=commit_id) |
|
959 | 959 | try: |
|
960 | 960 | node = commit.get_node(f_path) |
|
961 | 961 | if node.is_dir(): |
|
962 | 962 | raise NodeError('%s path is a %s not a file' |
|
963 | 963 | % (node, type(node))) |
|
964 | 964 | except NodeDoesNotExistError: |
|
965 | 965 | commit = EmptyCommit( |
|
966 | 966 | commit_id=commit_id, |
|
967 | 967 | idx=commit.idx, |
|
968 | 968 | repo=commit.repository, |
|
969 | 969 | alias=commit.repository.alias, |
|
970 | 970 | message=commit.message, |
|
971 | 971 | author=commit.author, |
|
972 | 972 | date=commit.date) |
|
973 | 973 | node = FileNode(f_path, '', commit=commit) |
|
974 | 974 | else: |
|
975 | 975 | commit = EmptyCommit( |
|
976 | 976 | repo=c.rhodecode_repo, |
|
977 | 977 | alias=c.rhodecode_repo.alias) |
|
978 | 978 | node = FileNode(f_path, '', commit=commit) |
|
979 | 979 | return node |
|
980 | 980 | |
|
981 | 981 | def _get_node_history(self, commit, f_path, commits=None): |
|
982 | 982 | """ |
|
983 | 983 | get commit history for given node |
|
984 | 984 | |
|
985 | 985 | :param commit: commit to calculate history |
|
986 | 986 | :param f_path: path for node to calculate history for |
|
987 | 987 | :param commits: if passed don't calculate history and take |
|
988 | 988 | commits defined in this list |
|
989 | 989 | """ |
|
990 | 990 | # calculate history based on tip |
|
991 | 991 | tip = c.rhodecode_repo.get_commit() |
|
992 | 992 | if commits is None: |
|
993 | 993 | pre_load = ["author", "branch"] |
|
994 | 994 | try: |
|
995 | 995 | commits = tip.get_file_history(f_path, pre_load=pre_load) |
|
996 | 996 | except (NodeDoesNotExistError, CommitError): |
|
997 | 997 | # this node is not present at tip! |
|
998 | 998 | commits = commit.get_file_history(f_path, pre_load=pre_load) |
|
999 | 999 | |
|
1000 | 1000 | history = [] |
|
1001 | 1001 | commits_group = ([], _("Changesets")) |
|
1002 | 1002 | for commit in commits: |
|
1003 | 1003 | branch = ' (%s)' % commit.branch if commit.branch else '' |
|
1004 | 1004 | n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch) |
|
1005 | 1005 | commits_group[0].append((commit.raw_id, n_desc,)) |
|
1006 | 1006 | history.append(commits_group) |
|
1007 | 1007 | |
|
1008 | 1008 | symbolic_reference = self._symbolic_reference |
|
1009 | 1009 | |
|
1010 | 1010 | if c.rhodecode_repo.alias == 'svn': |
|
1011 | 1011 | adjusted_f_path = self._adjust_file_path_for_svn( |
|
1012 | 1012 | f_path, c.rhodecode_repo) |
|
1013 | 1013 | if adjusted_f_path != f_path: |
|
1014 | 1014 | log.debug( |
|
1015 | 1015 | 'Recognized svn tag or branch in file "%s", using svn ' |
|
1016 | 1016 | 'specific symbolic references', f_path) |
|
1017 | 1017 | f_path = adjusted_f_path |
|
1018 | 1018 | symbolic_reference = self._symbolic_reference_svn |
|
1019 | 1019 | |
|
1020 | 1020 | branches = self._create_references( |
|
1021 | 1021 | c.rhodecode_repo.branches, symbolic_reference, f_path) |
|
1022 | 1022 | branches_group = (branches, _("Branches")) |
|
1023 | 1023 | |
|
1024 | 1024 | tags = self._create_references( |
|
1025 | 1025 | c.rhodecode_repo.tags, symbolic_reference, f_path) |
|
1026 | 1026 | tags_group = (tags, _("Tags")) |
|
1027 | 1027 | |
|
1028 | 1028 | history.append(branches_group) |
|
1029 | 1029 | history.append(tags_group) |
|
1030 | 1030 | |
|
1031 | 1031 | return history, commits |
|
1032 | 1032 | |
|
1033 | 1033 | def _adjust_file_path_for_svn(self, f_path, repo): |
|
1034 | 1034 | """ |
|
1035 | 1035 | Computes the relative path of `f_path`. |
|
1036 | 1036 | |
|
1037 | 1037 | This is mainly based on prefix matching of the recognized tags and |
|
1038 | 1038 | branches in the underlying repository. |
|
1039 | 1039 | """ |
|
1040 | 1040 | tags_and_branches = itertools.chain( |
|
1041 | 1041 | repo.branches.iterkeys(), |
|
1042 | 1042 | repo.tags.iterkeys()) |
|
1043 | 1043 | tags_and_branches = sorted(tags_and_branches, key=len, reverse=True) |
|
1044 | 1044 | |
|
1045 | 1045 | for name in tags_and_branches: |
|
1046 | 1046 | if f_path.startswith(name + '/'): |
|
1047 | 1047 | f_path = vcspath.relpath(f_path, name) |
|
1048 | 1048 | break |
|
1049 | 1049 | return f_path |
|
1050 | 1050 | |
|
1051 | 1051 | def _create_references( |
|
1052 | 1052 | self, branches_or_tags, symbolic_reference, f_path): |
|
1053 | 1053 | items = [] |
|
1054 | 1054 | for name, commit_id in branches_or_tags.items(): |
|
1055 | 1055 | sym_ref = symbolic_reference(commit_id, name, f_path) |
|
1056 | 1056 | items.append((sym_ref, name)) |
|
1057 | 1057 | return items |
|
1058 | 1058 | |
|
1059 | 1059 | def _symbolic_reference(self, commit_id, name, f_path): |
|
1060 | 1060 | return commit_id |
|
1061 | 1061 | |
|
1062 | 1062 | def _symbolic_reference_svn(self, commit_id, name, f_path): |
|
1063 | 1063 | new_f_path = vcspath.join(name, f_path) |
|
1064 | 1064 | return u'%s@%s' % (new_f_path, commit_id) |
|
1065 | 1065 | |
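
# A hedged, illustrative sketch (not part of the diff) of how the two svn
# helpers above compose; the branch name, path and commit id are made-up
# values, and posixpath stands in for the vcspath module used in this file.
import posixpath as vcspath

branches = {'branches/stable': 'a1b2c3'}
f_path = 'branches/stable/setup.py'

# _adjust_file_path_for_svn: longest-prefix match strips the branch prefix
for name in sorted(branches, key=len, reverse=True):
    if f_path.startswith(name + '/'):
        rel_path = vcspath.relpath(f_path, name)   # -> 'setup.py'
        # _symbolic_reference_svn then rebuilds a path@commit reference
        sym_ref = u'%s@%s' % (vcspath.join(name, rel_path), branches[name])
        # -> u'branches/stable/setup.py@a1b2c3'
        break
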
|
1066 | 1066 | @LoginRequired() |
|
1067 | 1067 | @XHRRequired() |
|
1068 | 1068 | @HasRepoPermissionAnyDecorator( |
|
1069 | 1069 | 'repository.read', 'repository.write', 'repository.admin') |
|
1070 | 1070 | @jsonify |
|
1071 | 1071 | def nodelist(self, repo_name, revision, f_path): |
|
1072 | 1072 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
1073 | 1073 | |
|
1074 | 1074 | metadata = self._get_nodelist_at_commit( |
|
1075 | 1075 | repo_name, commit.raw_id, f_path) |
|
1076 | 1076 | return {'nodes': metadata} |
|
1077 | 1077 | |
|
1078 | 1078 | @LoginRequired() |
|
1079 | 1079 | @XHRRequired() |
|
1080 | 1080 | @HasRepoPermissionAnyDecorator( |
|
1081 | 1081 | 'repository.read', 'repository.write', 'repository.admin') |
|
1082 | 1082 | def nodetree_full(self, repo_name, commit_id, f_path): |
|
1083 | 1083 | """ |
|
1084 | 1084 | Returns rendered html of file tree that contains commit date, |
|
1085 | 1085 | author, revision for the specified combination of |
|
1086 | 1086 | repo, commit_id and file path |
|
1087 | 1087 | |
|
1088 | 1088 | :param repo_name: name of the repository |
|
1089 | 1089 | :param commit_id: commit_id of file tree |
|
1090 | 1090 | :param f_path: file path of the requested directory |
|
1091 | 1091 | """ |
|
1092 | 1092 | |
|
1093 | 1093 | commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
1094 | 1094 | try: |
|
1095 | 1095 | dir_node = commit.get_node(f_path) |
|
1096 | 1096 | except RepositoryError as e: |
|
1097 | 1097 | return 'error {}'.format(safe_str(e)) |
|
1098 | 1098 | |
|
1099 | 1099 | if dir_node.is_file(): |
|
1100 | 1100 | return '' |
|
1101 | 1101 | |
|
1102 | 1102 | c.file = dir_node |
|
1103 | 1103 | c.commit = commit |
|
1104 | 1104 | |
|
1105 | 1105 | # using force=True here is a little trick: we flush the cache and

1106 | 1106 | # recompute it under the same key as without full_load, so the fully

1107 | 1107 | # loaded cached tree is now returned instead of the partial one
|
1108 | 1108 | return self._get_tree_at_commit( |
|
1109 | 1109 | repo_name, commit.raw_id, dir_node.path, full_load=True, |
|
1110 | 1110 | force=True) |
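
A side note on the `force=True` trick in `nodetree_full` above: the point is to recompute a cached value under the same key that plain lookups use, so later partial requests hit the fully loaded entry. Below is a minimal sketch of that pattern, with a plain dict standing in for RhodeCode's actual cache region (the names here are illustrative, not RhodeCode APIs):

cache = {}

def render_tree(path, full_load):
    # placeholder for the expensive tree rendering
    return '<tree %s full_load=%s>' % (path, full_load)

def get_tree(repo_name, commit_id, path, full_load=False, force=False):
    key = (repo_name, commit_id, path)  # key deliberately ignores full_load
    if force:
        cache.pop(key, None)            # flush a previously cached entry
    if key not in cache:
        cache[key] = render_tree(path, full_load)
    return cache[key]

get_tree('repo', 'abc', '/', full_load=False)              # caches partial
get_tree('repo', 'abc', '/', full_load=True, force=True)   # replaces with full
assert 'full_load=True' in get_tree('repo', 'abc', '/')    # plain hit is full
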
@@ -1,1008 +1,1009 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | pull requests controller for rhodecode for initializing pull requests |
|
23 | 23 | """ |
|
24 | 24 | import types |
|
25 | 25 | |
|
26 | 26 | import peppercorn |
|
27 | 27 | import formencode |
|
28 | 28 | import logging |
|
29 | 29 | import collections |
|
30 | 30 | |
|
31 | 31 | from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest |
|
32 | 32 | from pylons import request, tmpl_context as c, url |
|
33 | 33 | from pylons.controllers.util import redirect |
|
34 | 34 | from pylons.i18n.translation import _ |
|
35 | 35 | from pyramid.threadlocal import get_current_registry |
|
36 | 36 | from sqlalchemy.sql import func |
|
37 | 37 | from sqlalchemy.sql.expression import or_ |
|
38 | 38 | |
|
39 | 39 | from rhodecode import events |
|
40 | 40 | from rhodecode.lib import auth, diffs, helpers as h, codeblocks |
|
41 | 41 | from rhodecode.lib.ext_json import json |
|
42 | 42 | from rhodecode.lib.base import ( |
|
43 | 43 | BaseRepoController, render, vcs_operation_context) |
|
44 | 44 | from rhodecode.lib.auth import ( |
|
45 | 45 | LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, |
|
46 | 46 | HasAcceptedRepoType, XHRRequired) |
|
47 | 47 | from rhodecode.lib.channelstream import channelstream_request |
|
48 | 48 | from rhodecode.lib.utils import jsonify |
|
49 | 49 | from rhodecode.lib.utils2 import ( |
|
50 | 50 | safe_int, safe_str, str2bool, safe_unicode) |
|
51 | 51 | from rhodecode.lib.vcs.backends.base import ( |
|
52 | 52 | EmptyCommit, UpdateFailureReason, EmptyRepository) |
|
53 | 53 | from rhodecode.lib.vcs.exceptions import ( |
|
54 | 54 | EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError, |
|
55 | 55 | NodeDoesNotExistError) |
|
56 | 56 | |
|
57 | 57 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
58 | 58 | from rhodecode.model.comment import CommentsModel |
|
59 | 59 | from rhodecode.model.db import (PullRequest, ChangesetStatus, ChangesetComment, |
|
60 | 60 | Repository, PullRequestVersion) |
|
61 | 61 | from rhodecode.model.forms import PullRequestForm |
|
62 | 62 | from rhodecode.model.meta import Session |
|
63 | 63 | from rhodecode.model.pull_request import PullRequestModel, MergeCheck |
|
64 | 64 | |
|
65 | 65 | log = logging.getLogger(__name__) |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | class PullrequestsController(BaseRepoController): |
|
69 | 69 | |
|
70 | 70 | def __before__(self): |
|
71 | 71 | super(PullrequestsController, self).__before__() |
|
72 | 72 | c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED |
|
73 | 73 | c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED |
|
74 | 74 | |
|
75 | 75 | @LoginRequired() |
|
76 | 76 | @NotAnonymous() |
|
77 | 77 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
78 | 78 | 'repository.admin') |
|
79 | 79 | @HasAcceptedRepoType('git', 'hg') |
|
80 | 80 | def index(self): |
|
81 | 81 | source_repo = c.rhodecode_db_repo |
|
82 | 82 | |
|
83 | 83 | try: |
|
84 | 84 | source_repo.scm_instance().get_commit() |
|
85 | 85 | except EmptyRepositoryError: |
|
86 | 86 | h.flash(h.literal(_('There are no commits yet')), |
|
87 | 87 | category='warning') |
|
88 | 88 | redirect(h.route_path('repo_summary', repo_name=source_repo.repo_name)) |
|
89 | 89 | |
|
90 | 90 | commit_id = request.GET.get('commit') |
|
91 | 91 | branch_ref = request.GET.get('branch') |
|
92 | 92 | bookmark_ref = request.GET.get('bookmark') |
|
93 | 93 | |
|
94 | 94 | try: |
|
95 | 95 | source_repo_data = PullRequestModel().generate_repo_data( |
|
96 | 96 | source_repo, commit_id=commit_id, |
|
97 | 97 | branch=branch_ref, bookmark=bookmark_ref) |
|
98 | 98 | except CommitDoesNotExistError as e: |
|
99 | 99 | log.exception(e) |
|
100 | 100 | h.flash(_('Commit does not exist'), 'error') |
|
101 | 101 | redirect(url('pullrequest_home', repo_name=source_repo.repo_name)) |
|
102 | 102 | |
|
103 | 103 | default_target_repo = source_repo |
|
104 | 104 | |
|
105 | 105 | if source_repo.parent: |
|
106 | 106 | parent_vcs_obj = source_repo.parent.scm_instance() |
|
107 | 107 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
108 | 108 | # change default if we have a parent repo |
|
109 | 109 | default_target_repo = source_repo.parent |
|
110 | 110 | |
|
111 | 111 | target_repo_data = PullRequestModel().generate_repo_data( |
|
112 | 112 | default_target_repo) |
|
113 | 113 | |
|
114 | 114 | selected_source_ref = source_repo_data['refs']['selected_ref'] |
|
115 | 115 | |
|
116 | 116 | title_source_ref = selected_source_ref.split(':', 2)[1] |
|
117 | 117 | c.default_title = PullRequestModel().generate_pullrequest_title( |
|
118 | 118 | source=source_repo.repo_name, |
|
119 | 119 | source_ref=title_source_ref, |
|
120 | 120 | target=default_target_repo.repo_name |
|
121 | 121 | ) |
|
122 | 122 | |
|
123 | 123 | c.default_repo_data = { |
|
124 | 124 | 'source_repo_name': source_repo.repo_name, |
|
125 | 125 | 'source_refs_json': json.dumps(source_repo_data), |
|
126 | 126 | 'target_repo_name': default_target_repo.repo_name, |
|
127 | 127 | 'target_refs_json': json.dumps(target_repo_data), |
|
128 | 128 | } |
|
129 | 129 | c.default_source_ref = selected_source_ref |
|
130 | 130 | |
|
131 | 131 | return render('/pullrequests/pullrequest.mako') |
|
132 | 132 | |
|
133 | 133 | @LoginRequired() |
|
134 | 134 | @NotAnonymous() |
|
135 | 135 | @XHRRequired() |
|
136 | 136 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
137 | 137 | 'repository.admin') |
|
138 | 138 | @jsonify |
|
139 | 139 | def get_repo_refs(self, repo_name, target_repo_name): |
|
140 | 140 | repo = Repository.get_by_repo_name(target_repo_name) |
|
141 | 141 | if not repo: |
|
142 | 142 | raise HTTPNotFound |
|
143 | 143 | return PullRequestModel().generate_repo_data(repo) |
|
144 | 144 | |
|
145 | 145 | @LoginRequired() |
|
146 | 146 | @NotAnonymous() |
|
147 | 147 | @XHRRequired() |
|
148 | 148 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
149 | 149 | 'repository.admin') |
|
150 | 150 | @jsonify |
|
151 | 151 | def get_repo_destinations(self, repo_name): |
|
152 | 152 | repo = Repository.get_by_repo_name(repo_name) |
|
153 | 153 | if not repo: |
|
154 | 154 | raise HTTPNotFound |
|
155 | 155 | filter_query = request.GET.get('query') |
|
156 | 156 | |
|
157 | 157 | query = Repository.query() \ |
|
158 | 158 | .order_by(func.length(Repository.repo_name)) \ |
|
159 | 159 | .filter(or_( |
|
160 | 160 | Repository.repo_name == repo.repo_name, |
|
161 | 161 | Repository.fork_id == repo.repo_id)) |
|
162 | 162 | |
|
163 | 163 | if filter_query: |
|
164 | 164 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) |
|
165 | 165 | query = query.filter( |
|
166 | 166 | Repository.repo_name.ilike(ilike_expression)) |
|
167 | 167 | |
|
168 | 168 | add_parent = False |
|
169 | 169 | if repo.parent: |
|
170 | 170 | if filter_query in repo.parent.repo_name: |
|
171 | 171 | parent_vcs_obj = repo.parent.scm_instance() |
|
172 | 172 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
173 | 173 | add_parent = True |
|
174 | 174 | |
|
175 | 175 | limit = 20 - 1 if add_parent else 20 |
|
176 | 176 | all_repos = query.limit(limit).all() |
|
177 | 177 | if add_parent: |
|
178 | 178 | all_repos += [repo.parent] |
|
179 | 179 | |
|
180 | 180 | repos = [] |
|
181 | 181 | for obj in self.scm_model.get_repos(all_repos): |
|
182 | 182 | repos.append({ |
|
183 | 183 | 'id': obj['name'], |
|
184 | 184 | 'text': obj['name'], |
|
185 | 185 | 'type': 'repo', |
|
186 | 186 | 'obj': obj['dbrepo'] |
|
187 | 187 | }) |
|
188 | 188 | |
|
189 | 189 | data = { |
|
190 | 190 | 'more': False, |
|
191 | 191 | 'results': [{ |
|
192 | 192 | 'text': _('Repositories'), |
|
193 | 193 | 'children': repos |
|
194 | 194 | }] if repos else [] |
|
195 | 195 | } |
|
196 | 196 | return data |
|
197 | 197 | |
|
198 | 198 | @LoginRequired() |
|
199 | 199 | @NotAnonymous() |
|
200 | 200 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
201 | 201 | 'repository.admin') |
|
202 | 202 | @HasAcceptedRepoType('git', 'hg') |
|
203 | 203 | @auth.CSRFRequired() |
|
204 | 204 | def create(self, repo_name): |
|
205 | 205 | repo = Repository.get_by_repo_name(repo_name) |
|
206 | 206 | if not repo: |
|
207 | 207 | raise HTTPNotFound |
|
208 | 208 | |
|
209 | 209 | controls = peppercorn.parse(request.POST.items()) |
|
210 | 210 | |
|
211 | 211 | try: |
|
212 | 212 | _form = PullRequestForm(repo.repo_id)().to_python(controls) |
|
213 | 213 | except formencode.Invalid as errors: |
|
214 | 214 | if errors.error_dict.get('revisions'): |
|
215 | 215 | msg = 'Revisions: %s' % errors.error_dict['revisions'] |
|
216 | 216 | elif errors.error_dict.get('pullrequest_title'): |
|
217 | 217 | msg = _('Pull request requires a title with min. 3 chars') |
|
218 | 218 | else: |
|
219 | 219 | msg = _('Error creating pull request: {}').format(errors) |
|
220 | 220 | log.exception(msg) |
|
221 | 221 | h.flash(msg, 'error') |
|
222 | 222 | |
|
223 | 223 | # would rather just go back to form ... |
|
224 | 224 | return redirect(url('pullrequest_home', repo_name=repo_name)) |
|
225 | 225 | |
|
226 | 226 | source_repo = _form['source_repo'] |
|
227 | 227 | source_ref = _form['source_ref'] |
|
228 | 228 | target_repo = _form['target_repo'] |
|
229 | 229 | target_ref = _form['target_ref'] |
|
230 | 230 | commit_ids = _form['revisions'][::-1] |
|
231 | 231 | |
|
232 | 232 | # find the ancestor for this pr |
|
233 | 233 | source_db_repo = Repository.get_by_repo_name(_form['source_repo']) |
|
234 | 234 | target_db_repo = Repository.get_by_repo_name(_form['target_repo']) |
|
235 | 235 | |
|
236 | 236 | source_scm = source_db_repo.scm_instance() |
|
237 | 237 | target_scm = target_db_repo.scm_instance() |
|
238 | 238 | |
|
239 | 239 | source_commit = source_scm.get_commit(source_ref.split(':')[-1]) |
|
240 | 240 | target_commit = target_scm.get_commit(target_ref.split(':')[-1]) |
|
241 | 241 | |
|
242 | 242 | ancestor = source_scm.get_common_ancestor( |
|
243 | 243 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
244 | 244 | |
|
245 | 245 | target_ref_type, target_ref_name, __ = _form['target_ref'].split(':') |
|
246 | 246 | target_ref = ':'.join((target_ref_type, target_ref_name, ancestor)) |
|
247 | 247 | |
|
248 | 248 | pullrequest_title = _form['pullrequest_title'] |
|
249 | 249 | title_source_ref = source_ref.split(':', 2)[1] |
|
250 | 250 | if not pullrequest_title: |
|
251 | 251 | pullrequest_title = PullRequestModel().generate_pullrequest_title( |
|
252 | 252 | source=source_repo, |
|
253 | 253 | source_ref=title_source_ref, |
|
254 | 254 | target=target_repo |
|
255 | 255 | ) |
|
256 | 256 | |
|
257 | 257 | description = _form['pullrequest_desc'] |
|
258 | 258 | |
|
259 | 259 | get_default_reviewers_data, validate_default_reviewers = \ |
|
260 | 260 | PullRequestModel().get_reviewer_functions() |
|
261 | 261 | |
|
262 | 262 | # recalculate reviewers logic, to make sure we can validate this |
|
263 | 263 | reviewer_rules = get_default_reviewers_data( |
|
264 | 264 | c.rhodecode_user.get_instance(), source_db_repo, |
|
265 | 265 | source_commit, target_db_repo, target_commit) |
|
266 | 266 | |
|
267 | 267 | given_reviewers = _form['review_members'] |
|
268 | 268 | reviewers = validate_default_reviewers(given_reviewers, reviewer_rules) |
|
269 | 269 | |
|
270 | 270 | try: |
|
271 | 271 | pull_request = PullRequestModel().create( |
|
272 | 272 | c.rhodecode_user.user_id, source_repo, source_ref, target_repo, |
|
273 | 273 | target_ref, commit_ids, reviewers, pullrequest_title, |
|
274 | 274 | description, reviewer_rules |
|
275 | 275 | ) |
|
276 | 276 | Session().commit() |
|
277 | 277 | h.flash(_('Successfully opened new pull request'), |
|
278 | 278 | category='success') |
|
279 | 279 | except Exception as e: |
|
280 | 280 | msg = _('Error occurred during creation of this pull request.') |
|
281 | 281 | log.exception(msg) |
|
282 | 282 | h.flash(msg, category='error') |
|
283 | 283 | return redirect(url('pullrequest_home', repo_name=repo_name)) |
|
284 | 284 | |
|
285 | 285 | return redirect(url('pullrequest_show', repo_name=target_repo, |
|
286 | 286 | pull_request_id=pull_request.pull_request_id)) |
|
287 | 287 | |
|
288 | 288 | @LoginRequired() |
|
289 | 289 | @NotAnonymous() |
|
290 | 290 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
291 | 291 | 'repository.admin') |
|
292 | 292 | @auth.CSRFRequired() |
|
293 | 293 | @jsonify |
|
294 | 294 | def update(self, repo_name, pull_request_id): |
|
295 | 295 | pull_request_id = safe_int(pull_request_id) |
|
296 | 296 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
297 | 297 | # only owner or admin can update it |
|
298 | 298 | allowed_to_update = PullRequestModel().check_user_update( |
|
299 | 299 | pull_request, c.rhodecode_user) |
|
300 | 300 | if allowed_to_update: |
|
301 | 301 | controls = peppercorn.parse(request.POST.items()) |
|
302 | 302 | |
|
303 | 303 | if 'review_members' in controls: |
|
304 | 304 | self._update_reviewers( |
|
305 | 305 | pull_request_id, controls['review_members'], |
|
306 | 306 | pull_request.reviewer_data) |
|
307 | 307 | elif str2bool(request.POST.get('update_commits', 'false')): |
|
308 | 308 | self._update_commits(pull_request) |
|
309 | 309 | elif str2bool(request.POST.get('edit_pull_request', 'false')): |
|
310 | 310 | self._edit_pull_request(pull_request) |
|
311 | 311 | else: |
|
312 | 312 | raise HTTPBadRequest() |
|
313 | 313 | return True |
|
314 | 314 | raise HTTPForbidden() |
|
315 | 315 | |
|
316 | 316 | def _edit_pull_request(self, pull_request): |
|
317 | 317 | try: |
|
318 | 318 | PullRequestModel().edit( |
|
319 | 319 | pull_request, request.POST.get('title'), |
|
320 | request.POST.get('description')) | |
|
320 | request.POST.get('description'), c.rhodecode_user) | |
|
321 | 321 | except ValueError: |
|
322 | 322 | msg = _(u'Cannot update closed pull requests.') |
|
323 | 323 | h.flash(msg, category='error') |
|
324 | 324 | return |
|
325 | 325 | else: |
|
326 | 326 | Session().commit() |
|
327 | 327 | |
|
328 | 328 | msg = _(u'Pull request title & description updated.') |
|
329 | 329 | h.flash(msg, category='success') |
|
330 | 330 | return |
|
331 | 331 | |
|
332 | 332 | def _update_commits(self, pull_request): |
|
333 | 333 | resp = PullRequestModel().update_commits(pull_request) |
|
334 | 334 | |
|
335 | 335 | if resp.executed: |
|
336 | 336 | |
|
337 | 337 | if resp.target_changed and resp.source_changed: |
|
338 | 338 | changed = 'target and source repositories' |
|
339 | 339 | elif resp.target_changed and not resp.source_changed: |
|
340 | 340 | changed = 'target repository' |
|
341 | 341 | elif not resp.target_changed and resp.source_changed: |
|
342 | 342 | changed = 'source repository' |
|
343 | 343 | else: |
|
344 | 344 | changed = 'nothing' |
|
345 | 345 | |
|
346 | 346 | msg = _( |
|
347 | 347 | u'Pull request updated to "{source_commit_id}" with ' |
|
348 | 348 | u'{count_added} added, {count_removed} removed commits. ' |
|
349 | 349 | u'Source of changes: {change_source}') |
|
350 | 350 | msg = msg.format( |
|
351 | 351 | source_commit_id=pull_request.source_ref_parts.commit_id, |
|
352 | 352 | count_added=len(resp.changes.added), |
|
353 | 353 | count_removed=len(resp.changes.removed), |
|
354 | 354 | change_source=changed) |
|
355 | 355 | h.flash(msg, category='success') |
|
356 | 356 | |
|
357 | 357 | registry = get_current_registry() |
|
358 | 358 | rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {}) |
|
359 | 359 | channelstream_config = rhodecode_plugins.get('channelstream', {}) |
|
360 | 360 | if channelstream_config.get('enabled'): |
|
361 | 361 | message = msg + ( |
|
362 | 362 | ' - <a onclick="window.location.reload()">' |
|
363 | 363 | '<strong>{}</strong></a>'.format(_('Reload page'))) |
|
364 | 364 | channel = '/repo${}$/pr/{}'.format( |
|
365 | 365 | pull_request.target_repo.repo_name, |
|
366 | 366 | pull_request.pull_request_id |
|
367 | 367 | ) |
|
368 | 368 | payload = { |
|
369 | 369 | 'type': 'message', |
|
370 | 370 | 'user': 'system', |
|
371 | 371 | 'exclude_users': [request.user.username], |
|
372 | 372 | 'channel': channel, |
|
373 | 373 | 'message': { |
|
374 | 374 | 'message': message, |
|
375 | 375 | 'level': 'success', |
|
376 | 376 | 'topic': '/notifications' |
|
377 | 377 | } |
|
378 | 378 | } |
|
379 | 379 | channelstream_request( |
|
380 | 380 | channelstream_config, [payload], '/message', |
|
381 | 381 | raise_exc=False) |
|
382 | 382 | else: |
|
383 | 383 | msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason] |
|
384 | 384 | warning_reasons = [ |
|
385 | 385 | UpdateFailureReason.NO_CHANGE, |
|
386 | 386 | UpdateFailureReason.WRONG_REF_TYPE, |
|
387 | 387 | ] |
|
388 | 388 | category = 'warning' if resp.reason in warning_reasons else 'error' |
|
389 | 389 | h.flash(msg, category=category) |
|
390 | 390 | |
|
391 | 391 | @auth.CSRFRequired() |
|
392 | 392 | @LoginRequired() |
|
393 | 393 | @NotAnonymous() |
|
394 | 394 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
395 | 395 | 'repository.admin') |
|
396 | 396 | def merge(self, repo_name, pull_request_id): |
|
397 | 397 | """ |
|
398 | 398 | POST /{repo_name}/pull-request/{pull_request_id} |
|
399 | 399 | |
|
400 | 400 | Merge will perform a server-side merge of the specified |
|
401 | 401 | pull request, if the pull request is approved and mergeable. |
|
402 | 402 | After successful merging, the pull request is automatically |
|
403 | 403 | closed, with a relevant comment. |
|
404 | 404 | """ |
|
405 | 405 | pull_request_id = safe_int(pull_request_id) |
|
406 | 406 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
407 | 407 | user = c.rhodecode_user |
|
408 | 408 | |
|
409 | 409 | check = MergeCheck.validate(pull_request, user) |
|
410 | 410 | merge_possible = not check.failed |
|
411 | 411 | |
|
412 | 412 | for err_type, error_msg in check.errors: |
|
413 | 413 | h.flash(error_msg, category=err_type) |
|
414 | 414 | |
|
415 | 415 | if merge_possible: |
|
416 | 416 | log.debug("Pre-conditions checked, trying to merge.") |
|
417 | 417 | extras = vcs_operation_context( |
|
418 | 418 | request.environ, repo_name=pull_request.target_repo.repo_name, |
|
419 | 419 | username=user.username, action='push', |
|
420 | 420 | scm=pull_request.target_repo.repo_type) |
|
421 | 421 | self._merge_pull_request(pull_request, user, extras) |
|
422 | 422 | |
|
423 | 423 | return redirect(url( |
|
424 | 424 | 'pullrequest_show', |
|
425 | 425 | repo_name=pull_request.target_repo.repo_name, |
|
426 | 426 | pull_request_id=pull_request.pull_request_id)) |
|
427 | 427 | |
|
428 | 428 | def _merge_pull_request(self, pull_request, user, extras): |
|
429 | 429 | merge_resp = PullRequestModel().merge( |
|
430 | 430 | pull_request, user, extras=extras) |
|
431 | 431 | |
|
432 | 432 | if merge_resp.executed: |
|
433 | 433 | log.debug("The merge was successful, closing the pull request.") |
|
434 | 434 | PullRequestModel().close_pull_request( |
|
435 | 435 | pull_request.pull_request_id, user) |
|
436 | 436 | Session().commit() |
|
437 | 437 | msg = _('Pull request was successfully merged and closed.') |
|
438 | 438 | h.flash(msg, category='success') |
|
439 | 439 | else: |
|
440 | 440 | log.debug( |
|
441 | 441 | "The merge was not successful. Merge response: %s", |
|
442 | 442 | merge_resp) |
|
443 | 443 | msg = PullRequestModel().merge_status_message( |
|
444 | 444 | merge_resp.failure_reason) |
|
445 | 445 | h.flash(msg, category='error') |
|
446 | 446 | |
|
447 | 447 | def _update_reviewers(self, pull_request_id, review_members, reviewer_rules): |
|
448 | 448 | |
|
449 | 449 | get_default_reviewers_data, validate_default_reviewers = \ |
|
450 | 450 | PullRequestModel().get_reviewer_functions() |
|
451 | 451 | |
|
452 | 452 | try: |
|
453 | 453 | reviewers = validate_default_reviewers(review_members, reviewer_rules) |
|
454 | 454 | except ValueError as e: |
|
455 | 455 | log.error('Reviewers Validation: {}'.format(e)) |
|
456 | 456 | h.flash(e, category='error') |
|
457 | 457 | return |
|
458 | 458 | |
|
459 | PullRequestModel().update_reviewers( | 

459 | PullRequestModel().update_reviewers( | 

460 | pull_request_id, reviewers, c.rhodecode_user) | 
|
460 | 461 | h.flash(_('Pull request reviewers updated.'), category='success') |
|
461 | 462 | Session().commit() |
|
462 | 463 | |
|
463 | 464 | @LoginRequired() |
|
464 | 465 | @NotAnonymous() |
|
465 | 466 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
466 | 467 | 'repository.admin') |
|
467 | 468 | @auth.CSRFRequired() |
|
468 | 469 | @jsonify |
|
469 | 470 | def delete(self, repo_name, pull_request_id): |
|
470 | 471 | pull_request_id = safe_int(pull_request_id) |
|
471 | 472 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
472 | 473 | |
|
473 | 474 | pr_closed = pull_request.is_closed() |
|
474 | 475 | allowed_to_delete = PullRequestModel().check_user_delete( |
|
475 | 476 | pull_request, c.rhodecode_user) and not pr_closed |
|
476 | 477 | |
|
477 | 478 | # only owner can delete it ! |
|
478 | 479 | if allowed_to_delete: |
|
479 | PullRequestModel().delete(pull_request) | |
|
480 | PullRequestModel().delete(pull_request, c.rhodecode_user) | |
|
480 | 481 | Session().commit() |
|
481 | 482 | h.flash(_('Successfully deleted pull request'), |
|
482 | 483 | category='success') |
|
483 | 484 | return redirect(url('my_account_pullrequests')) |
|
484 | 485 | |
|
485 | 486 | h.flash(_('Your are not allowed to delete this pull request'), |
|
486 | 487 | category='error') |
|
487 | 488 | raise HTTPForbidden() |
|
488 | 489 | |
|
489 | 490 | def _get_pr_version(self, pull_request_id, version=None): |
|
490 | 491 | pull_request_id = safe_int(pull_request_id) |
|
491 | 492 | at_version = None |
|
492 | 493 | |
|
493 | 494 | if version and version == 'latest': |
|
494 | 495 | pull_request_ver = PullRequest.get(pull_request_id) |
|
495 | 496 | pull_request_obj = pull_request_ver |
|
496 | 497 | _org_pull_request_obj = pull_request_obj |
|
497 | 498 | at_version = 'latest' |
|
498 | 499 | elif version: |
|
499 | 500 | pull_request_ver = PullRequestVersion.get_or_404(version) |
|
500 | 501 | pull_request_obj = pull_request_ver |
|
501 | 502 | _org_pull_request_obj = pull_request_ver.pull_request |
|
502 | 503 | at_version = pull_request_ver.pull_request_version_id |
|
503 | 504 | else: |
|
504 | 505 | _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404( |
|
505 | 506 | pull_request_id) |
|
506 | 507 | |
|
507 | 508 | pull_request_display_obj = PullRequest.get_pr_display_object( |
|
508 | 509 | pull_request_obj, _org_pull_request_obj) |
|
509 | 510 | |
|
510 | 511 | return _org_pull_request_obj, pull_request_obj, \ |
|
511 | 512 | pull_request_display_obj, at_version |
|
512 | 513 | |
|
513 | 514 | def _get_diffset( |
|
514 | 515 | self, source_repo, source_ref_id, target_ref_id, target_commit, |
|
515 | 516 | source_commit, diff_limit, file_limit, display_inline_comments): |
|
516 | 517 | vcs_diff = PullRequestModel().get_diff( |
|
517 | 518 | source_repo, source_ref_id, target_ref_id) |
|
518 | 519 | |
|
519 | 520 | diff_processor = diffs.DiffProcessor( |
|
520 | 521 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
521 | 522 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
522 | 523 | |
|
523 | 524 | _parsed = diff_processor.prepare() |
|
524 | 525 | |
|
525 | 526 | def _node_getter(commit): |
|
526 | 527 | def get_node(fname): |
|
527 | 528 | try: |
|
528 | 529 | return commit.get_node(fname) |
|
529 | 530 | except NodeDoesNotExistError: |
|
530 | 531 | return None |
|
531 | 532 | |
|
532 | 533 | return get_node |
|
533 | 534 | |
|
534 | 535 | diffset = codeblocks.DiffSet( |
|
535 | 536 | repo_name=c.repo_name, |
|
536 | 537 | source_repo_name=c.source_repo.repo_name, |
|
537 | 538 | source_node_getter=_node_getter(target_commit), |
|
538 | 539 | target_node_getter=_node_getter(source_commit), |
|
539 | 540 | comments=display_inline_comments |
|
540 | 541 | ) |
|
541 | 542 | diffset = diffset.render_patchset( |
|
542 | 543 | _parsed, target_commit.raw_id, source_commit.raw_id) |
|
543 | 544 | |
|
544 | 545 | return diffset |
|
545 | 546 | |
|
546 | 547 | @LoginRequired() |
|
547 | 548 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
548 | 549 | 'repository.admin') |
|
549 | 550 | def show(self, repo_name, pull_request_id): |
|
550 | 551 | pull_request_id = safe_int(pull_request_id) |
|
551 | 552 | version = request.GET.get('version') |
|
552 | 553 | from_version = request.GET.get('from_version') or version |
|
553 | 554 | merge_checks = request.GET.get('merge_checks') |
|
554 | 555 | c.fulldiff = str2bool(request.GET.get('fulldiff')) |
|
555 | 556 | |
|
556 | 557 | (pull_request_latest, |
|
557 | 558 | pull_request_at_ver, |
|
558 | 559 | pull_request_display_obj, |
|
559 | 560 | at_version) = self._get_pr_version( |
|
560 | 561 | pull_request_id, version=version) |
|
561 | 562 | pr_closed = pull_request_latest.is_closed() |
|
562 | 563 | |
|
563 | 564 | if pr_closed and (version or from_version): |
|
564 | 565 | # not allow to browse versions |
|
565 | 566 | return redirect(h.url('pullrequest_show', repo_name=repo_name, |
|
566 | 567 | pull_request_id=pull_request_id)) |
|
567 | 568 | |
|
568 | 569 | versions = pull_request_display_obj.versions() |
|
569 | 570 | |
|
570 | 571 | c.at_version = at_version |
|
571 | 572 | c.at_version_num = (at_version |
|
572 | 573 | if at_version and at_version != 'latest' |
|
573 | 574 | else None) |
|
574 | 575 | c.at_version_pos = ChangesetComment.get_index_from_version( |
|
575 | 576 | c.at_version_num, versions) |
|
576 | 577 | |
|
577 | 578 | (prev_pull_request_latest, |
|
578 | 579 | prev_pull_request_at_ver, |
|
579 | 580 | prev_pull_request_display_obj, |
|
580 | 581 | prev_at_version) = self._get_pr_version( |
|
581 | 582 | pull_request_id, version=from_version) |
|
582 | 583 | |
|
583 | 584 | c.from_version = prev_at_version |
|
584 | 585 | c.from_version_num = (prev_at_version |
|
585 | 586 | if prev_at_version and prev_at_version != 'latest' |
|
586 | 587 | else None) |
|
587 | 588 | c.from_version_pos = ChangesetComment.get_index_from_version( |
|
588 | 589 | c.from_version_num, versions) |
|
589 | 590 | |
|
590 | 591 | # define if we're in COMPARE mode or VIEW at version mode |
|
591 | 592 | compare = at_version != prev_at_version |
|
592 | 593 | |
|
593 | 594 | # the repo_name this pull request was opened against,

594 | 595 | # i.e. target_repo, must match
|
595 | 596 | if repo_name != pull_request_at_ver.target_repo.repo_name: |
|
596 | 597 | raise HTTPNotFound |
|
597 | 598 | |
|
598 | 599 | c.shadow_clone_url = PullRequestModel().get_shadow_clone_url( |
|
599 | 600 | pull_request_at_ver) |
|
600 | 601 | |
|
601 | 602 | c.pull_request = pull_request_display_obj |
|
602 | 603 | c.pull_request_latest = pull_request_latest |
|
603 | 604 | |
|
604 | 605 | if compare or (at_version and not at_version == 'latest'): |
|
605 | 606 | c.allowed_to_change_status = False |
|
606 | 607 | c.allowed_to_update = False |
|
607 | 608 | c.allowed_to_merge = False |
|
608 | 609 | c.allowed_to_delete = False |
|
609 | 610 | c.allowed_to_comment = False |
|
610 | 611 | c.allowed_to_close = False |
|
611 | 612 | else: |
|
612 | 613 | can_change_status = PullRequestModel().check_user_change_status( |
|
613 | 614 | pull_request_at_ver, c.rhodecode_user) |
|
614 | 615 | c.allowed_to_change_status = can_change_status and not pr_closed |
|
615 | 616 | |
|
616 | 617 | c.allowed_to_update = PullRequestModel().check_user_update( |
|
617 | 618 | pull_request_latest, c.rhodecode_user) and not pr_closed |
|
618 | 619 | c.allowed_to_merge = PullRequestModel().check_user_merge( |
|
619 | 620 | pull_request_latest, c.rhodecode_user) and not pr_closed |
|
620 | 621 | c.allowed_to_delete = PullRequestModel().check_user_delete( |
|
621 | 622 | pull_request_latest, c.rhodecode_user) and not pr_closed |
|
622 | 623 | c.allowed_to_comment = not pr_closed |
|
623 | 624 | c.allowed_to_close = c.allowed_to_merge and not pr_closed |
|
624 | 625 | |
|
625 | 626 | c.forbid_adding_reviewers = False |
|
626 | 627 | c.forbid_author_to_review = False |
|
627 | 628 | c.forbid_commit_author_to_review = False |
|
628 | 629 | |
|
629 | 630 | if pull_request_latest.reviewer_data and \ |
|
630 | 631 | 'rules' in pull_request_latest.reviewer_data: |
|
631 | 632 | rules = pull_request_latest.reviewer_data['rules'] or {} |
|
632 | 633 | try: |
|
633 | 634 | c.forbid_adding_reviewers = rules.get( |
|
634 | 635 | 'forbid_adding_reviewers') |
|
635 | 636 | c.forbid_author_to_review = rules.get( |
|
636 | 637 | 'forbid_author_to_review') |
|
637 | 638 | c.forbid_commit_author_to_review = rules.get( |
|
638 | 639 | 'forbid_commit_author_to_review') |
|
639 | 640 | except Exception: |
|
640 | 641 | pass |
|
641 | 642 | |
|
642 | 643 | # check merge capabilities |
|
643 | 644 | _merge_check = MergeCheck.validate( |
|
644 | 645 | pull_request_latest, user=c.rhodecode_user) |
|
645 | 646 | c.pr_merge_errors = _merge_check.error_details |
|
646 | 647 | c.pr_merge_possible = not _merge_check.failed |
|
647 | 648 | c.pr_merge_message = _merge_check.merge_msg |
|
648 | 649 | |
|
649 | 650 | c.pull_request_review_status = _merge_check.review_status |
|
650 | 651 | if merge_checks: |
|
651 | 652 | return render('/pullrequests/pullrequest_merge_checks.mako') |
|
652 | 653 | |
|
653 | 654 | comments_model = CommentsModel() |
|
654 | 655 | |
|
655 | 656 | # reviewers and statuses |
|
656 | 657 | c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses() |
|
657 | 658 | allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers] |
|
658 | 659 | |
|
659 | 660 | # GENERAL COMMENTS with versions # |
|
660 | 661 | q = comments_model._all_general_comments_of_pull_request(pull_request_latest) |
|
661 | 662 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
662 | 663 | general_comments = q |
|
663 | 664 | |
|
664 | 665 | # pick comments we want to render at current version |
|
665 | 666 | c.comment_versions = comments_model.aggregate_comments( |
|
666 | 667 | general_comments, versions, c.at_version_num) |
|
667 | 668 | c.comments = c.comment_versions[c.at_version_num]['until'] |
|
668 | 669 | |
|
669 | 670 | # INLINE COMMENTS with versions # |
|
670 | 671 | q = comments_model._all_inline_comments_of_pull_request(pull_request_latest) |
|
671 | 672 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
672 | 673 | inline_comments = q |
|
673 | 674 | |
|
674 | 675 | c.inline_versions = comments_model.aggregate_comments( |
|
675 | 676 | inline_comments, versions, c.at_version_num, inline=True) |
|
676 | 677 | |
|
677 | 678 | # inject latest version |
|
678 | 679 | latest_ver = PullRequest.get_pr_display_object( |
|
679 | 680 | pull_request_latest, pull_request_latest) |
|
680 | 681 | |
|
681 | 682 | c.versions = versions + [latest_ver] |
|
682 | 683 | |
|
683 | 684 | # if we use version, then do not show later comments |
|
684 | 685 | # than current version |
|
685 | 686 | display_inline_comments = collections.defaultdict( |
|
686 | 687 | lambda: collections.defaultdict(list)) |
|
687 | 688 | for co in inline_comments: |
|
688 | 689 | if c.at_version_num: |
|
689 | 690 | # pick comments that are at least UPTO given version, so we |
|
690 | 691 | # don't render comments for higher version |
|
691 | 692 | should_render = co.pull_request_version_id and \ |
|
692 | 693 | co.pull_request_version_id <= c.at_version_num |
|
693 | 694 | else: |
|
694 | 695 | # showing all, for 'latest' |
|
695 | 696 | should_render = True |
|
696 | 697 | |
|
697 | 698 | if should_render: |
|
698 | 699 | display_inline_comments[co.f_path][co.line_no].append(co) |
|
699 | 700 | |
|
700 | 701 | # load diff data into template context, if we use compare mode then |
|
701 | 702 | # diff is calculated based on changes between versions of PR |
|
702 | 703 | |
|
703 | 704 | source_repo = pull_request_at_ver.source_repo |
|
704 | 705 | source_ref_id = pull_request_at_ver.source_ref_parts.commit_id |
|
705 | 706 | |
|
706 | 707 | target_repo = pull_request_at_ver.target_repo |
|
707 | 708 | target_ref_id = pull_request_at_ver.target_ref_parts.commit_id |
|
708 | 709 | |
|
709 | 710 | if compare: |
|
710 | 711 | # in compare switch the diff base to latest commit from prev version |
|
711 | 712 | target_ref_id = prev_pull_request_display_obj.revisions[0] |
|
712 | 713 | |
|
713 | 714 | # despite opening commits for bookmarks/branches/tags, we always |
|
714 | 715 | # convert this to rev to prevent changes after bookmark or branch change |
|
715 | 716 | c.source_ref_type = 'rev' |
|
716 | 717 | c.source_ref = source_ref_id |
|
717 | 718 | |
|
718 | 719 | c.target_ref_type = 'rev' |
|
719 | 720 | c.target_ref = target_ref_id |
|
720 | 721 | |
|
721 | 722 | c.source_repo = source_repo |
|
722 | 723 | c.target_repo = target_repo |
|
723 | 724 | |
|
724 | 725 | # diff_limit is the old behavior, will cut off the whole diff |
|
725 | 726 | # if the limit is applied otherwise will just hide the |
|
726 | 727 | # big files from the front-end |
|
727 | 728 | diff_limit = self.cut_off_limit_diff |
|
728 | 729 | file_limit = self.cut_off_limit_file |
|
729 | 730 | |
|
730 | 731 | c.commit_ranges = [] |
|
731 | 732 | source_commit = EmptyCommit() |
|
732 | 733 | target_commit = EmptyCommit() |
|
733 | 734 | c.missing_requirements = False |
|
734 | 735 | |
|
735 | 736 | source_scm = source_repo.scm_instance() |
|
736 | 737 | target_scm = target_repo.scm_instance() |
|
737 | 738 | |
|
738 | 739 | # try first shadow repo, fallback to regular repo |
|
739 | 740 | try: |
|
740 | 741 | commits_source_repo = pull_request_latest.get_shadow_repo() |
|
741 | 742 | except Exception: |
|
742 | 743 | log.debug('Failed to get shadow repo', exc_info=True) |
|
743 | 744 | commits_source_repo = source_scm |
|
744 | 745 | |
|
745 | 746 | c.commits_source_repo = commits_source_repo |
|
746 | 747 | commit_cache = {} |
|
747 | 748 | try: |
|
748 | 749 | pre_load = ["author", "branch", "date", "message"] |
|
749 | 750 | show_revs = pull_request_at_ver.revisions |
|
750 | 751 | for rev in show_revs: |
|
751 | 752 | comm = commits_source_repo.get_commit( |
|
752 | 753 | commit_id=rev, pre_load=pre_load) |
|
753 | 754 | c.commit_ranges.append(comm) |
|
754 | 755 | commit_cache[comm.raw_id] = comm |
|
755 | 756 | |
|
756 | 757 | # Order here matters, we first need to get target, and then |
|
757 | 758 | # the source |
|
758 | 759 | target_commit = commits_source_repo.get_commit( |
|
759 | 760 | commit_id=safe_str(target_ref_id)) |
|
760 | 761 | |
|
761 | 762 | source_commit = commits_source_repo.get_commit( |
|
762 | 763 | commit_id=safe_str(source_ref_id)) |
|
763 | 764 | |
|
764 | 765 | except CommitDoesNotExistError: |
|
765 | 766 | log.warning( |
|
766 | 767 | 'Failed to get commit from `{}` repo'.format( |
|
767 | 768 | commits_source_repo), exc_info=True) |
|
768 | 769 | except RepositoryRequirementError: |
|
769 | 770 | log.warning( |
|
770 | 771 | 'Failed to get all required data from repo', exc_info=True) |
|
771 | 772 | c.missing_requirements = True |
|
772 | 773 | |
|
773 | 774 | c.ancestor = None # set it to None, to hide it from PR view |
|
774 | 775 | |
|
775 | 776 | try: |
|
776 | 777 | ancestor_id = source_scm.get_common_ancestor( |
|
777 | 778 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
778 | 779 | c.ancestor_commit = source_scm.get_commit(ancestor_id) |
|
779 | 780 | except Exception: |
|
780 | 781 | c.ancestor_commit = None |
|
781 | 782 | |
|
782 | 783 | c.statuses = source_repo.statuses( |
|
783 | 784 | [x.raw_id for x in c.commit_ranges]) |
|
784 | 785 | |
|
785 | 786 | # auto collapse if we have more than limit |
|
786 | 787 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
787 | 788 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
788 | 789 | c.compare_mode = compare |
|
789 | 790 | |
|
790 | 791 | c.missing_commits = False |
|
791 | 792 | if (c.missing_requirements or isinstance(source_commit, EmptyCommit) |
|
792 | 793 | or source_commit == target_commit): |
|
793 | 794 | |
|
794 | 795 | c.missing_commits = True |
|
795 | 796 | else: |
|
796 | 797 | |
|
797 | 798 | c.diffset = self._get_diffset( |
|
798 | 799 | commits_source_repo, source_ref_id, target_ref_id, |
|
799 | 800 | target_commit, source_commit, |
|
800 | 801 | diff_limit, file_limit, display_inline_comments) |
|
801 | 802 | |
|
802 | 803 | c.limited_diff = c.diffset.limited_diff |
|
803 | 804 | |
|
804 | 805 | # calculate removed files that are bound to comments |
|
805 | 806 | comment_deleted_files = [ |
|
806 | 807 | fname for fname in display_inline_comments |
|
807 | 808 | if fname not in c.diffset.file_stats] |
|
808 | 809 | |
|
809 | 810 | c.deleted_files_comments = collections.defaultdict(dict) |
|
810 | 811 | for fname, per_line_comments in display_inline_comments.items(): |
|
811 | 812 | if fname in comment_deleted_files: |
|
812 | 813 | c.deleted_files_comments[fname]['stats'] = 0 |
|
813 | 814 | c.deleted_files_comments[fname]['comments'] = list() |
|
814 | 815 | for lno, comments in per_line_comments.items(): |
|
815 | 816 | c.deleted_files_comments[fname]['comments'].extend( |
|
816 | 817 | comments) |
|
817 | 818 | |
|
818 | 819 | # this is a hack to properly display links, when creating PR, the |
|
819 | 820 | # compare view and others uses different notation, and |
|
820 | 821 | # compare_commits.mako renders links based on the target_repo. |
|
821 | 822 | # We need to swap that here to generate it properly on the html side |
|
822 | 823 | c.target_repo = c.source_repo |
|
823 | 824 | |
|
824 | 825 | c.commit_statuses = ChangesetStatus.STATUSES |
|
825 | 826 | |
|
826 | 827 | c.show_version_changes = not pr_closed |
|
827 | 828 | if c.show_version_changes: |
|
828 | 829 | cur_obj = pull_request_at_ver |
|
829 | 830 | prev_obj = prev_pull_request_at_ver |
|
830 | 831 | |
|
831 | 832 | old_commit_ids = prev_obj.revisions |
|
832 | 833 | new_commit_ids = cur_obj.revisions |
|
833 | 834 | commit_changes = PullRequestModel()._calculate_commit_id_changes( |
|
834 | 835 | old_commit_ids, new_commit_ids) |
|
835 | 836 | c.commit_changes_summary = commit_changes |
|
836 | 837 | |
|
837 | 838 | # calculate the diff for commits between versions |
|
838 | 839 | c.commit_changes = [] |
|
839 | 840 | mark = lambda cs, fw: list( |
|
840 | 841 | h.itertools.izip_longest([], cs, fillvalue=fw)) |
|
841 | 842 | for c_type, raw_id in mark(commit_changes.added, 'a') \ |
|
842 | 843 | + mark(commit_changes.removed, 'r') \ |
|
843 | 844 | + mark(commit_changes.common, 'c'): |
|
844 | 845 | |
|
845 | 846 | if raw_id in commit_cache: |
|
846 | 847 | commit = commit_cache[raw_id] |
|
847 | 848 | else: |
|
848 | 849 | try: |
|
849 | 850 | commit = commits_source_repo.get_commit(raw_id) |
|
850 | 851 | except CommitDoesNotExistError: |
|
851 | 852 | # in case we fail extracting still use "dummy" commit |
|
852 | 853 | # for display in commit diff |
|
853 | 854 | commit = h.AttributeDict( |
|
854 | 855 | {'raw_id': raw_id, |
|
855 | 856 | 'message': 'EMPTY or MISSING COMMIT'}) |
|
856 | 857 | c.commit_changes.append([c_type, commit]) |
|
857 | 858 | |
|
858 | 859 | # current user review statuses for each version |
|
859 | 860 | c.review_versions = {} |
|
860 | 861 | if c.rhodecode_user.user_id in allowed_reviewers: |
|
861 | 862 | for co in general_comments: |
|
862 | 863 | if co.author.user_id == c.rhodecode_user.user_id: |
|
863 | 864 | # each comment has a status change |
|
864 | 865 | status = co.status_change |
|
865 | 866 | if status: |
|
866 | 867 | _ver_pr = status[0].comment.pull_request_version_id |
|
867 | 868 | c.review_versions[_ver_pr] = status[0] |
|
868 | 869 | |
|
869 | 870 | return render('/pullrequests/pullrequest_show.mako') |
|
870 | 871 | |
|
871 | 872 | @LoginRequired() |
|
872 | 873 | @NotAnonymous() |
|
873 | 874 | @HasRepoPermissionAnyDecorator( |
|
874 | 875 | 'repository.read', 'repository.write', 'repository.admin') |
|
875 | 876 | @auth.CSRFRequired() |
|
876 | 877 | @jsonify |
|
877 | 878 | def comment(self, repo_name, pull_request_id): |
|
878 | 879 | pull_request_id = safe_int(pull_request_id) |
|
879 | 880 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
880 | 881 | if pull_request.is_closed(): |
|
881 | 882 | log.debug('comment: forbidden because pull request is closed') |
|
882 | 883 | raise HTTPForbidden() |
|
883 | 884 | |
|
884 | 885 | status = request.POST.get('changeset_status', None) |
|
885 | 886 | text = request.POST.get('text') |
|
886 | 887 | comment_type = request.POST.get('comment_type') |
|
887 | 888 | resolves_comment_id = request.POST.get('resolves_comment_id', None) |
|
888 | 889 | close_pull_request = request.POST.get('close_pull_request') |
|
889 | 890 | |
|
890 | 891 | # the logic here should work like following, if we submit close |
|
891 | 892 | # pr comment, use `close_pull_request_with_comment` function |
|
892 | 893 | # else handle regular comment logic |
|
893 | 894 | user = c.rhodecode_user |
|
894 | 895 | repo = c.rhodecode_db_repo |
|
895 | 896 | |
|
896 | 897 | if close_pull_request: |
|
897 | 898 | # only owner or admin or person with write permissions |
|
898 | 899 | allowed_to_close = PullRequestModel().check_user_update( |
|
899 | 900 | pull_request, c.rhodecode_user) |
|
900 | 901 | if not allowed_to_close: |
|
901 | 902 | log.debug('comment: forbidden because not allowed to close ' |
|
902 | 903 | 'pull request %s', pull_request_id) |
|
903 | 904 | raise HTTPForbidden() |
|
904 | 905 | comment, status = PullRequestModel().close_pull_request_with_comment( |
|
905 | 906 | pull_request, user, repo, message=text) |
|
906 | 907 | Session().flush() |
|
907 | 908 | events.trigger( |
|
908 | 909 | events.PullRequestCommentEvent(pull_request, comment)) |
|
909 | 910 | |
|
910 | 911 | else: |
|
911 | 912 | # regular comment case, could be inline, or one with status. |
|
912 | 913 | # for that one we check also permissions |
|
913 | 914 | |
|
914 | 915 | allowed_to_change_status = PullRequestModel().check_user_change_status( |
|
915 | 916 | pull_request, c.rhodecode_user) |
|
916 | 917 | |
|
917 | 918 | if status and allowed_to_change_status: |
|
918 | 919 | message = (_('Status change %(transition_icon)s %(status)s') |
|
919 | 920 | % {'transition_icon': '>', |
|
920 | 921 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
921 | 922 | text = text or message |
|
922 | 923 | |
|
923 | 924 | comment = CommentsModel().create( |
|
924 | 925 | text=text, |
|
925 | 926 | repo=c.rhodecode_db_repo.repo_id, |
|
926 | 927 | user=c.rhodecode_user.user_id, |
|
927 | 928 | pull_request=pull_request_id, |
|
928 | 929 | f_path=request.POST.get('f_path'), |
|
929 | 930 | line_no=request.POST.get('line'), |
|
930 | 931 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
931 | 932 | if status and allowed_to_change_status else None), |
|
932 | 933 | status_change_type=(status |
|
933 | 934 | if status and allowed_to_change_status else None), |
|
934 | 935 | comment_type=comment_type, |
|
935 | 936 | resolves_comment_id=resolves_comment_id |
|
936 | 937 | ) |
|
937 | 938 | |
|
938 | 939 | if allowed_to_change_status: |
|
939 | 940 | # calculate old status before we change it |
|
940 | 941 | old_calculated_status = pull_request.calculated_review_status() |
|
941 | 942 | |
|
942 | 943 | # get status if set ! |
|
943 | 944 | if status: |
|
944 | 945 | ChangesetStatusModel().set_status( |
|
945 | 946 | c.rhodecode_db_repo.repo_id, |
|
946 | 947 | status, |
|
947 | 948 | c.rhodecode_user.user_id, |
|
948 | 949 | comment, |
|
949 | 950 | pull_request=pull_request_id |
|
950 | 951 | ) |
|
951 | 952 | |
|
952 | 953 | Session().flush() |
|
953 | 954 | events.trigger( |
|
954 | 955 | events.PullRequestCommentEvent(pull_request, comment)) |
|
955 | 956 | |
|
956 | 957 | # we now calculate the status of pull request, and based on that |
|
957 | 958 | # calculation we set the commits status |
|
958 | 959 | calculated_status = pull_request.calculated_review_status() |
|
959 | 960 | if old_calculated_status != calculated_status: |
|
960 | 961 | PullRequestModel()._trigger_pull_request_hook( |
|
961 | 962 | pull_request, c.rhodecode_user, 'review_status_change') |
|
962 | 963 | |
|
963 | 964 | Session().commit() |
|
964 | 965 | |
|
965 | 966 | if not request.is_xhr: |
|
966 | 967 | return redirect(h.url('pullrequest_show', repo_name=repo_name, |
|
967 | 968 | pull_request_id=pull_request_id)) |
|
968 | 969 | |
|
969 | 970 | data = { |
|
970 | 971 | 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))), |
|
971 | 972 | } |
|
972 | 973 | if comment: |
|
973 | 974 | c.co = comment |
|
974 | 975 | rendered_comment = render('changeset/changeset_comment_block.mako') |
|
975 | 976 | data.update(comment.get_dict()) |
|
976 | 977 | data.update({'rendered_text': rendered_comment}) |
|
977 | 978 | |
|
978 | 979 | return data |
|
979 | 980 | |
|
980 | 981 | @LoginRequired() |
|
981 | 982 | @NotAnonymous() |
|
982 | 983 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
983 | 984 | 'repository.admin') |
|
984 | 985 | @auth.CSRFRequired() |
|
985 | 986 | @jsonify |
|
986 | 987 | def delete_comment(self, repo_name, comment_id): |
|
987 | 988 | return self._delete_comment(comment_id) |
|
988 | 989 | |
|
989 | 990 | def _delete_comment(self, comment_id): |
|
990 | 991 | comment_id = safe_int(comment_id) |
|
991 | 992 | co = ChangesetComment.get_or_404(comment_id) |
|
992 | 993 | if co.pull_request.is_closed(): |
|
993 | 994 | # don't allow deleting comments on closed pull request |
|
994 | 995 | raise HTTPForbidden() |
|
995 | 996 | |
|
996 | 997 | is_owner = co.author.user_id == c.rhodecode_user.user_id |
|
997 | 998 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name) |
|
998 | 999 | if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner: |
|
999 | 1000 | old_calculated_status = co.pull_request.calculated_review_status() |
|
1000 | CommentsModel().delete(comment=co) | |
|
1001 | CommentsModel().delete(comment=co, user=c.rhodecode_user) | |
|
1001 | 1002 | Session().commit() |
|
1002 | 1003 | calculated_status = co.pull_request.calculated_review_status() |
|
1003 | 1004 | if old_calculated_status != calculated_status: |
|
1004 | 1005 | PullRequestModel()._trigger_pull_request_hook( |
|
1005 | 1006 | co.pull_request, c.rhodecode_user, 'review_status_change') |
|
1006 | 1007 | return True |
|
1007 | 1008 | else: |
|
1008 | 1009 | raise HTTPForbidden() |
@@ -1,240 +1,257 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2017-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import datetime |
|
23 | 23 | |
|
24 | 24 | from rhodecode.model import meta |
|
25 | 25 | from rhodecode.model.db import User, UserLog, Repository |
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | log = logging.getLogger(__name__) |
|
29 | 29 | |
|
30 | 30 | # action as key, and expected action_data as value |
|
31 | ACTIONS = { | |
|
31 | ACTIONS_V1 = { | |
|
32 | 32 | 'user.login.success': {'user_agent': ''}, |
|
33 | 33 | 'user.login.failure': {'user_agent': ''}, |
|
34 | 34 | 'user.logout': {'user_agent': ''}, |
|
35 | 35 | 'user.password.reset_request': {}, |
|
36 | 36 | 'user.push': {'user_agent': '', 'commit_ids': []}, |
|
37 | 37 | 'user.pull': {'user_agent': ''}, |
|
38 | 38 | |
|
39 | 39 | 'user.create': {'data': {}}, |
|
40 | 40 | 'user.delete': {'old_data': {}}, |
|
41 | 41 | 'user.edit': {'old_data': {}}, |
|
42 | 42 | 'user.edit.permissions': {}, |
|
43 | 43 | 'user.edit.ip.add': {}, |
|
44 | 44 | 'user.edit.ip.delete': {}, |
|
45 | 45 | 'user.edit.token.add': {}, |
|
46 | 46 | 'user.edit.token.delete': {}, |
|
47 | 47 | 'user.edit.email.add': {}, |
|
48 | 48 | 'user.edit.email.delete': {}, |
|
49 | 49 | 'user.edit.password_reset.enabled': {}, |
|
50 | 50 | 'user.edit.password_reset.disabled': {}, |
|
51 | 51 | |
|
52 | 52 | 'user_group.create': {'data': {}}, |
|
53 | 53 | 'user_group.delete': {'old_data': {}}, |
|
54 | 54 | 'user_group.edit': {'old_data': {}}, |
|
55 | 55 | 'user_group.edit.permissions': {}, |
|
56 | 56 | 'user_group.edit.member.add': {}, |
|
57 | 57 | 'user_group.edit.member.delete': {}, |
|
58 | 58 | |
|
59 | 59 | 'repo.create': {'data': {}}, |
|
60 | 60 | 'repo.fork': {'data': {}}, |
|
61 | 61 | 'repo.edit': {'old_data': {}}, |
|
62 | 62 | 'repo.edit.permissions': {}, |
|
63 | 63 | 'repo.delete': {'old_data': {}}, |
|
64 | 64 | 'repo.commit.strip': {}, |
|
65 | 65 | 'repo.archive.download': {}, |
|
66 | 66 | |
|
67 | 'repo.pull_request.create': '', | |
|
68 | 'repo.pull_request.edit': '', | |
|
69 | 'repo.pull_request.delete': '', | |
|
70 | 'repo.pull_request.close': '', | |
|
71 | 'repo.pull_request.merge': '', | |
|
72 | 'repo.pull_request.vote': '', | |
|
73 | 'repo.pull_request.comment.create': '', | |
|
74 | 'repo.pull_request.comment.delete': '', | |
|
75 | ||
|
76 | 'repo.pull_request.reviewer.add': '', | |
|
77 | 'repo.pull_request.reviewer.delete': '', | |
|
78 | ||
|
79 | 'repo.commit.comment.create': '', | |
|
80 | 'repo.commit.comment.delete': '', | |
|
81 | 'repo.commit.vote': '', | |
|
82 | ||
|
67 | 83 | 'repo_group.create': {'data': {}}, |
|
68 | 84 | 'repo_group.edit': {'old_data': {}}, |
|
69 | 85 | 'repo_group.edit.permissions': {}, |
|
70 | 86 | 'repo_group.delete': {'old_data': {}}, |
|
71 | 87 | } |
|
88 | ACTIONS = ACTIONS_V1 | |
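# Illustrative sketch, not part of this changeset: every audit entry must
# use a key registered in ACTIONS; store(), defined below, rejects unknown
# action names before touching the database.
from rhodecode.lib import audit_logger

assert 'repo.pull_request.close' in audit_logger.ACTIONS
try:
    audit_logger.store(action='repo.not_registered', user=None)
except ValueError:
    pass  # 'Action `repo.not_registered` is not supported'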
|
72 | 89 | |
|
73 | 90 | SOURCE_WEB = 'source_web' |
|
74 | 91 | SOURCE_API = 'source_api' |
|
75 | 92 | |
|
76 | 93 | |
|
77 | 94 | class UserWrap(object): |
|
78 | 95 | """ |
|
79 | 96 | Fake object used to imitate AuthUser |
|
80 | 97 | """ |
|
81 | 98 | |
|
82 | 99 | def __init__(self, user_id=None, username=None, ip_addr=None): |
|
83 | 100 | self.user_id = user_id |
|
84 | 101 | self.username = username |
|
85 | 102 | self.ip_addr = ip_addr |
|
86 | 103 | |
|
87 | 104 | |
|
88 | 105 | class RepoWrap(object): |
|
89 | 106 | """ |
|
90 | 107 | Fake object used to imitate the repo object that the audit logger requires
|
91 | 108 | """ |
|
92 | 109 | |
|
93 | 110 | def __init__(self, repo_id=None, repo_name=None): |
|
94 | 111 | self.repo_id = repo_id |
|
95 | 112 | self.repo_name = repo_name |
|
96 | 113 | |
|
97 | 114 | |
|
98 | 115 | def _store_log(action_name, action_data, user_id, username, user_data, |
|
99 | 116 | ip_address, repository_id, repository_name): |
|
100 | 117 | user_log = UserLog() |
|
101 | 118 | user_log.version = UserLog.VERSION_2 |
|
102 | 119 | |
|
103 | 120 | user_log.action = action_name |
|
104 | 121 | user_log.action_data = action_data |
|
105 | 122 | |
|
106 | 123 | user_log.user_ip = ip_address |
|
107 | 124 | |
|
108 | 125 | user_log.user_id = user_id |
|
109 | 126 | user_log.username = username |
|
110 | 127 | user_log.user_data = user_data |
|
111 | 128 | |
|
112 | 129 | user_log.repository_id = repository_id |
|
113 | 130 | user_log.repository_name = repository_name |
|
114 | 131 | |
|
115 | 132 | user_log.action_date = datetime.datetime.now() |
|
116 | 133 | |
|
117 | 134 | log.info('AUDIT: Logging action: `%s` by user:id:%s[%s] ip:%s', |
|
118 | 135 | action_name, user_id, username, ip_address) |
|
119 | 136 | |
|
120 | 137 | return user_log |
|
121 | 138 | |
|
122 | 139 | |
|
123 | 140 | def store_web(*args, **kwargs): |
|
124 | 141 | if 'action_data' not in kwargs: |
|
125 | 142 | kwargs['action_data'] = {} |
|
126 | 143 | kwargs['action_data'].update({ |
|
127 | 144 | 'source': SOURCE_WEB |
|
128 | 145 | }) |
|
129 | 146 | return store(*args, **kwargs) |
|
130 | 147 | |
|
131 | 148 | |
|
132 | 149 | def store_api(*args, **kwargs): |
|
133 | 150 | if 'action_data' not in kwargs: |
|
134 | 151 | kwargs['action_data'] = {} |
|
135 | 152 | kwargs['action_data'].update({ |
|
136 | 153 | 'source': SOURCE_API |
|
137 | 154 | }) |
|
138 | 155 | return store(*args, **kwargs) |
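# Illustrative sketch (hypothetical call sites): the two wrappers above are
# identical except for the `source` value stamped into action_data.
from rhodecode.lib import audit_logger

actor = audit_logger.UserWrap(username='admin', ip_addr='127.0.0.1')
audit_logger.store_web(action='user.logout', user=actor)
# -> stored action_data includes {'source': 'source_web'}
audit_logger.store_api(action='user.logout', user=actor)
# -> stored action_data includes {'source': 'source_api'}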
|
139 | 156 | |
|
140 | 157 | |
|
141 | 158 | def store(action, user, action_data=None, user_data=None, ip_addr=None, |
|
142 | 159 | repo=None, sa_session=None, commit=False): |
|
143 | 160 | """ |
|
144 | 161 | Audit logger for various actions made by users; typically this 

145 | 162 | results in a call such as:: 
|
146 | 163 | |
|
147 | 164 | from rhodecode.lib import audit_logger |
|
148 | 165 | |
|
149 | 166 | audit_logger.store( |
|
150 | 167 | action='repo.edit', user=self._rhodecode_user) |
|
151 | 168 | audit_logger.store( |
|
152 | 169 | action='repo.delete', action_data={'data': repo_data}, |
|
153 | 170 | user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8')) |
|
154 | 171 | |
|
155 | 172 | # repo action |
|
156 | 173 | audit_logger.store( |
|
157 | 174 | action='repo.delete', |
|
158 | 175 | user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8'), |
|
159 | 176 | repo=audit_logger.RepoWrap(repo_name='some-repo')) |
|
160 | 177 | |
|
161 | 178 | # repo action, when we know and have the repository object already |
|
162 | 179 | audit_logger.store( |
|
163 | 180 | action='repo.delete', |
|
164 | 181 | action_data={'source': audit_logger.SOURCE_WEB, }, |
|
165 | 182 | user=self._rhodecode_user, |
|
166 | 183 | repo=repo_object) |
|
167 | 184 | |
|
168 | 185 | # alternative wrapper to the above |
|
169 | 186 | audit_logger.store_web( |
|
170 | 187 | action='repo.delete', |
|
171 | 188 | action_data={}, |
|
172 | 189 | user=self._rhodecode_user, |
|
173 | 190 | repo=repo_object) |
|
174 | 191 | |
|
175 | 192 | # without a user? 
|
176 | 193 | audit_logger.store( |
|
177 | 194 | action='user.login.failure', |
|
178 | 195 | user=audit_logger.UserWrap( |
|
179 | 196 | username=self.request.params.get('username'), |
|
180 | 197 | ip_addr=self.request.remote_addr)) |
|
181 | 198 | |
|
182 | 199 | """ |
|
183 | 200 | from rhodecode.lib.utils2 import safe_unicode |
|
184 | 201 | from rhodecode.lib.auth import AuthUser |
|
185 | 202 | |
|
186 | 203 | action_spec = ACTIONS.get(action, None) |
|
187 | 204 | if action_spec is None: |
|
188 | 205 | raise ValueError('Action `{}` is not supported'.format(action)) |
|
189 | 206 | |
|
190 | 207 | if not sa_session: |
|
191 | 208 | sa_session = meta.Session() |
|
192 | 209 | |
|
193 | 210 | try: |
|
194 | 211 | username = getattr(user, 'username', None) |
|
195 | 212 | if not username: |
|
196 | 213 | pass |
|
197 | 214 | |
|
198 | 215 | user_id = getattr(user, 'user_id', None) |
|
199 | 216 | if not user_id: |
|
200 | 217 | # maybe we have a username? Try to figure out user_id from it 
|
201 | 218 | if username: |
|
202 | 219 | user_id = getattr( |
|
203 | 220 | User.get_by_username(username), 'user_id', None) |
|
204 | 221 | |
|
205 | 222 | ip_addr = ip_addr or getattr(user, 'ip_addr', None) |
|
206 | 223 | if not ip_addr: |
|
207 | 224 | pass |
|
208 | 225 | |
|
209 | 226 | if not user_data: |
|
210 | 227 | # try to get this from the auth user |
|
211 | 228 | if isinstance(user, AuthUser): |
|
212 | 229 | user_data = { |
|
213 | 230 | 'username': user.username, |
|
214 | 231 | 'email': user.email, |
|
215 | 232 | } |
|
216 | 233 | |
|
217 | 234 | repository_name = getattr(repo, 'repo_name', None) |
|
218 | 235 | repository_id = getattr(repo, 'repo_id', None) |
|
219 | 236 | if not repository_id: |
|
220 | 237 | # maybe we have a repo_name? Try to figure out repo_id from it 
|
221 | 238 | if repository_name: |
|
222 | 239 | repository_id = getattr( |
|
223 | 240 | Repository.get_by_repo_name(repository_name), 'repo_id', None) |
|
224 | 241 | |
|
225 | 242 | user_log = _store_log( |
|
226 | 243 | action_name=safe_unicode(action), |
|
227 | 244 | action_data=action_data or {}, |
|
228 | 245 | user_id=user_id, |
|
229 | 246 | username=username, |
|
230 | 247 | user_data=user_data or {}, |
|
231 | 248 | ip_address=safe_unicode(ip_addr), |
|
232 | 249 | repository_id=repository_id, |
|
233 | 250 | repository_name=repository_name |
|
234 | 251 | ) |
|
235 | 252 | sa_session.add(user_log) |
|
236 | 253 | if commit: |
|
237 | 254 | sa_session.commit() |
|
238 | 255 | |
|
239 | 256 | except Exception: |
|
240 | 257 | log.exception('AUDIT: failed to store audit log') |
@@ -1,1044 +1,982 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Utilities library for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import datetime |
|
26 | 26 | import decorator |
|
27 | 27 | import json |
|
28 | 28 | import logging |
|
29 | 29 | import os |
|
30 | 30 | import re |
|
31 | 31 | import shutil |
|
32 | 32 | import tempfile |
|
33 | 33 | import traceback |
|
34 | 34 | import tarfile |
|
35 | 35 | import warnings |
|
36 | 36 | import hashlib |
|
37 | 37 | from os.path import join as jn |
|
38 | 38 | |
|
39 | 39 | import paste |
|
40 | 40 | import pkg_resources |
|
41 | 41 | from paste.script.command import Command, BadCommand |
|
42 | 42 | from webhelpers.text import collapse, remove_formatting, strip_tags |
|
43 | 43 | from mako import exceptions |
|
44 | 44 | from pyramid.threadlocal import get_current_registry |
|
45 | 45 | from pyramid.request import Request |
|
46 | 46 | |
|
47 | 47 | from rhodecode.lib.fakemod import create_module |
|
48 | 48 | from rhodecode.lib.vcs.backends.base import Config |
|
49 | 49 | from rhodecode.lib.vcs.exceptions import VCSError |
|
50 | 50 | from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend |
|
51 | 51 | from rhodecode.lib.utils2 import ( |
|
52 | 52 | safe_str, safe_unicode, get_current_rhodecode_user, md5) |
|
53 | 53 | from rhodecode.model import meta |
|
54 | 54 | from rhodecode.model.db import ( |
|
55 | 55 | Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup) |
|
56 | 56 | from rhodecode.model.meta import Session |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | log = logging.getLogger(__name__) |
|
60 | 60 | |
|
61 | 61 | REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*') |
|
62 | 62 | |
|
63 | 63 | # String which contains characters that are not allowed in slug names for |
|
64 | 64 | # repositories or repository groups. It is properly escaped to use it in |
|
65 | 65 | # regular expressions. |
|
66 | 66 | SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:') |
|
67 | 67 | |
|
68 | 68 | # Regex that matches forbidden characters in repo/group slugs. |
|
69 | 69 | SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS)) |
|
70 | 70 | |
|
71 | 71 | # Regex that matches allowed characters in repo/group slugs. |
|
72 | 72 | SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS)) |
|
73 | 73 | |
|
74 | 74 | # Regex that matches whole repo/group slugs. |
|
75 | 75 | SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS)) |
|
76 | 76 | |
|
77 | 77 | _license_cache = None |
|
78 | 78 | |
|
79 | 79 | |
|
80 | 80 | def repo_name_slug(value): |
|
81 | 81 | """ |
|
82 | 82 | Return a slug of the repository name. 

83 | 83 | This function is called on each creation/modification 

84 | 84 | of a repository to prevent bad names. 
|
85 | 85 | """ |
|
86 | 86 | replacement_char = '-' |
|
87 | 87 | |
|
88 | 88 | slug = remove_formatting(value) |
|
89 | 89 | slug = SLUG_BAD_CHAR_RE.sub('', slug) |
|
90 | 90 | slug = re.sub('[\s]+', '-', slug) |
|
91 | 91 | slug = collapse(slug, replacement_char) |
|
92 | 92 | return slug |
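# Illustrative sketch of the expected behaviour, derived from the rules
# above: forbidden characters are dropped, whitespace runs become dashes.
assert repo_name_slug('my repo!!') == 'my-repo'
assert repo_name_slug('docs & notes') == 'docs-notes'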
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | #============================================================================== |
|
96 | 96 | # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS |
|
97 | 97 | #============================================================================== |
|
98 | 98 | def get_repo_slug(request): |
|
99 | 99 | if isinstance(request, Request) and getattr(request, 'db_repo', None): |
|
100 | 100 | # pyramid |
|
101 | 101 | _repo = request.db_repo.repo_name |
|
102 | 102 | else: |
|
103 | 103 | # TODO(marcink): remove after pylons migration... |
|
104 | 104 | _repo = request.environ['pylons.routes_dict'].get('repo_name') |
|
105 | 105 | |
|
106 | 106 | if _repo: |
|
107 | 107 | _repo = _repo.rstrip('/') |
|
108 | 108 | return _repo |
|
109 | 109 | |
|
110 | 110 | |
|
111 | 111 | def get_repo_group_slug(request): |
|
112 | 112 | if isinstance(request, Request) and getattr(request, 'matchdict', None): |
|
113 | 113 | # pyramid |
|
114 | 114 | _group = request.matchdict.get('repo_group_name') |
|
115 | 115 | else: |
|
116 | 116 | _group = request.environ['pylons.routes_dict'].get('group_name') |
|
117 | 117 | |
|
118 | 118 | if _group: |
|
119 | 119 | _group = _group.rstrip('/') |
|
120 | 120 | return _group |
|
121 | 121 | |
|
122 | 122 | |
|
123 | 123 | def get_user_group_slug(request): |
|
124 | 124 | if isinstance(request, Request) and getattr(request, 'matchdict', None): |
|
125 | 125 | # pyramid |
|
126 | 126 | _group = request.matchdict.get('user_group_id') |
|
127 | 127 | else: |
|
128 | 128 | _group = request.environ['pylons.routes_dict'].get('user_group_id') |
|
129 | 129 | |
|
130 | 130 | try: |
|
131 | 131 | _group = UserGroup.get(_group) |
|
132 | 132 | if _group: |
|
133 | 133 | _group = _group.users_group_name |
|
134 | 134 | except Exception: |
|
135 | 135 | log.debug(traceback.format_exc()) |
|
136 | 136 | # catch all failures here |
|
137 | 137 | pass |
|
138 | 138 | |
|
139 | 139 | return _group |
|
140 | 140 | |
|
141 | 141 | |
|
142 | def action_logger(user, action, repo, ipaddr='', sa=None, commit=False): | |
|
143 | """ | |
|
144 | Action logger for various actions made by users | |
|
145 | ||
|
146 | :param user: user that made this action, can be a unique username string or | |
|
147 | object containing user_id attribute | |
|
148 | :param action: action to log, should be one of predefined unique actions for | 
|
149 | easy translations | |
|
150 | :param repo: string name of repository or object containing repo_id, | |
|
151 | that action was made on | |
|
152 | :param ipaddr: optional ip address from which the action was made | 
|
153 | :param sa: optional sqlalchemy session | |
|
154 | ||
|
155 | """ | |
|
156 | ||
|
157 | if not sa: | |
|
158 | sa = meta.Session() | |
|
159 | # if we don't get an explicit IP address, try to get one from the registered | 

160 | # user in the tmpl context var | 
|
161 | if not ipaddr: | |
|
162 | ipaddr = getattr(get_current_rhodecode_user(), 'ip_addr', '') | |
|
163 | ||
|
164 | try: | |
|
165 | if getattr(user, 'user_id', None): | |
|
166 | user_obj = User.get(user.user_id) | |
|
167 | elif isinstance(user, basestring): | |
|
168 | user_obj = User.get_by_username(user) | |
|
169 | else: | |
|
170 | raise Exception('You have to provide a user object or a username') | |
|
171 | ||
|
172 | if getattr(repo, 'repo_id', None): | |
|
173 | repo_obj = Repository.get(repo.repo_id) | |
|
174 | repo_name = repo_obj.repo_name | |
|
175 | elif isinstance(repo, basestring): | |
|
176 | repo_name = repo.lstrip('/') | |
|
177 | repo_obj = Repository.get_by_repo_name(repo_name) | |
|
178 | else: | |
|
179 | repo_obj = None | |
|
180 | repo_name = '' | |
|
181 | ||
|
182 | user_log = UserLog() | |
|
183 | user_log.user_id = user_obj.user_id | |
|
184 | user_log.username = user_obj.username | |
|
185 | action = safe_unicode(action) | |
|
186 | user_log.action = action[:1200000] | |
|
187 | ||
|
188 | user_log.repository = repo_obj | |
|
189 | user_log.repository_name = repo_name | |
|
190 | ||
|
191 | user_log.action_date = datetime.datetime.now() | |
|
192 | user_log.user_ip = ipaddr | |
|
193 | sa.add(user_log) | |
|
194 | ||
|
195 | log.info('Logging action:`%s` on repo:`%s` by user:%s ip:%s', | |
|
196 | action, safe_unicode(repo), user_obj, ipaddr) | |
|
197 | if commit: | |
|
198 | sa.commit() | |
|
199 | except Exception: | |
|
200 | log.error(traceback.format_exc()) | |
|
201 | raise | |
|
202 | ||
|
203 | ||
|
204 | 142 | def get_filesystem_repos(path, recursive=False, skip_removed_repos=True): |
|
205 | 143 | """ |
|
206 | 144 | Scans the given path for repos and returns (name, (type, path)) tuples 
|
207 | 145 | |
|
208 | 146 | :param path: path to scan for repositories |
|
209 | 147 | :param recursive: recursive search and return names with subdirs in front |
|
210 | 148 | """ |
|
211 | 149 | |
|
212 | 150 | # remove ending slash for better results |
|
213 | 151 | path = path.rstrip(os.sep) |
|
214 | 152 | log.debug('now scanning in %s location recursive:%s...', path, recursive) |
|
215 | 153 | |
|
216 | 154 | def _get_repos(p): |
|
217 | 155 | dirpaths = _get_dirpaths(p) |
|
218 | 156 | if not _is_dir_writable(p): |
|
219 | 157 | log.warning('repo path without write access: %s', p) |
|
220 | 158 | |
|
221 | 159 | for dirpath in dirpaths: |
|
222 | 160 | if os.path.isfile(os.path.join(p, dirpath)): |
|
223 | 161 | continue |
|
224 | 162 | cur_path = os.path.join(p, dirpath) |
|
225 | 163 | |
|
226 | 164 | # skip removed repos |
|
227 | 165 | if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath): |
|
228 | 166 | continue |
|
229 | 167 | |
|
230 | 168 | # skip .<something> dirs 
|
231 | 169 | if dirpath.startswith('.'): |
|
232 | 170 | continue |
|
233 | 171 | |
|
234 | 172 | try: |
|
235 | 173 | scm_info = get_scm(cur_path) |
|
236 | 174 | yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info |
|
237 | 175 | except VCSError: |
|
238 | 176 | if not recursive: |
|
239 | 177 | continue |
|
240 | 178 | # check if this dir contains other repos for recursive scan 
|
241 | 179 | rec_path = os.path.join(p, dirpath) |
|
242 | 180 | if os.path.isdir(rec_path): |
|
243 | 181 | for inner_scm in _get_repos(rec_path): |
|
244 | 182 | yield inner_scm |
|
245 | 183 | |
|
246 | 184 | return _get_repos(path) |
|
247 | 185 | |
|
248 | 186 | |
|
249 | 187 | def _get_dirpaths(p): |
|
250 | 188 | try: |
|
251 | 189 | # OS-independent way of checking whether we have at least 

252 | 190 | # read-only access. 
|
253 | 191 | dirpaths = os.listdir(p) |
|
254 | 192 | except OSError: |
|
255 | 193 | log.warning('ignoring repo path without read access: %s', p) |
|
256 | 194 | return [] |
|
257 | 195 | |
|
258 | 196 | # os.listdir has a tweak: if a unicode path is passed into it, it tries to 
|
259 | 197 | # decode paths and suddenly returns unicode objects itself. The items it |
|
260 | 198 | # cannot decode are returned as strings and cause issues. |
|
261 | 199 | # |
|
262 | 200 | # Those paths are ignored here until a solid solution for path handling has |
|
263 | 201 | # been built. |
|
264 | 202 | expected_type = type(p) |
|
265 | 203 | |
|
266 | 204 | def _has_correct_type(item): |
|
267 | 205 | if type(item) is not expected_type: |
|
268 | 206 | log.error( |
|
269 | 207 | u"Ignoring path %s since it cannot be decoded into unicode.", |
|
270 | 208 | # Using "repr" to make sure that we see the byte value in case 

271 | 209 | # of support requests. 
|
272 | 210 | repr(item)) |
|
273 | 211 | return False |
|
274 | 212 | return True |
|
275 | 213 | |
|
276 | 214 | dirpaths = [item for item in dirpaths if _has_correct_type(item)] |
|
277 | 215 | |
|
278 | 216 | return dirpaths |
|
279 | 217 | |
|
280 | 218 | |
|
281 | 219 | def _is_dir_writable(path): |
|
282 | 220 | """ |
|
283 | 221 | Probe if `path` is writable. |
|
284 | 222 | |
|
285 | 223 | Due to trouble on Cygwin / Windows, this is actually probing if it is |
|
286 | 224 | possible to create a file inside of `path`, stat does not produce reliable |
|
287 | 225 | results in this case. |
|
288 | 226 | """ |
|
289 | 227 | try: |
|
290 | 228 | with tempfile.TemporaryFile(dir=path): |
|
291 | 229 | pass |
|
292 | 230 | except OSError: |
|
293 | 231 | return False |
|
294 | 232 | return True |
|
295 | 233 | |
|
296 | 234 | |
|
297 | 235 | def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None): |
|
298 | 236 | """ |
|
299 | 237 | Returns True if the given path is a valid repository, False otherwise. 

300 | 238 | If the expect_scm param is given, also compare whether the detected scm 

301 | 239 | is the same as the expected one. If explicit_scm is given, don't try to 

302 | 240 | detect the scm; just use the given one to check if the repo is valid 
|
303 | 241 | |
|
304 | 242 | :param repo_name: |
|
305 | 243 | :param base_path: |
|
306 | 244 | :param expect_scm: |
|
307 | 245 | :param explicit_scm: |
|
308 | 246 | |
|
309 | 247 | :return True: if given path is a valid repository |
|
310 | 248 | """ |
|
311 | 249 | full_path = os.path.join(safe_str(base_path), safe_str(repo_name)) |
|
312 | 250 | log.debug('Checking if `%s` is a valid path for repository. ' |
|
313 | 251 | 'Explicit type: %s', repo_name, explicit_scm) |
|
314 | 252 | |
|
315 | 253 | try: |
|
316 | 254 | if explicit_scm: |
|
317 | 255 | detected_scms = [get_scm_backend(explicit_scm)] |
|
318 | 256 | else: |
|
319 | 257 | detected_scms = get_scm(full_path) |
|
320 | 258 | |
|
321 | 259 | if expect_scm: |
|
322 | 260 | return detected_scms[0] == expect_scm |
|
323 | 261 | log.debug('path: %s is a vcs object:%s', full_path, detected_scms) 
|
324 | 262 | return True |
|
325 | 263 | except VCSError: |
|
326 | 264 | log.debug('path: %s is not a valid repo !', full_path) |
|
327 | 265 | return False |
|
328 | 266 | |
|
329 | 267 | |
|
330 | 268 | def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False): |
|
331 | 269 | """ |
|
332 | 270 | Returns True if given path is a repository group, False otherwise |
|
333 | 271 | |
|
334 | 272 | :param repo_group_name: 
|
335 | 273 | :param base_path: |
|
336 | 274 | """ |
|
337 | 275 | full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name)) |
|
338 | 276 | log.debug('Checking if `%s` is a valid path for repository group', |
|
339 | 277 | repo_group_name) |
|
340 | 278 | |
|
341 | 279 | # check if it's not a repo |
|
342 | 280 | if is_valid_repo(repo_group_name, base_path): |
|
343 | 281 | log.debug('Repo called %s exists, it is not a valid ' 

344 | 282 | 'repo group' % repo_group_name) 
|
345 | 283 | return False |
|
346 | 284 | |
|
347 | 285 | try: |
|
348 | 286 | # we need to check bare git repos at higher level |
|
349 | 287 | # since we might match branches/hooks/info/objects or possible |
|
350 | 288 | # other things inside bare git repo |
|
351 | 289 | scm_ = get_scm(os.path.dirname(full_path)) |
|
352 | 290 | log.debug('path: %s is a vcs object:%s, not a valid ' 

353 | 291 | 'repo group' % (full_path, scm_)) 
|
354 | 292 | return False |
|
355 | 293 | except VCSError: |
|
356 | 294 | pass |
|
357 | 295 | |
|
358 | 296 | # check if it's a valid path |
|
359 | 297 | if skip_path_check or os.path.isdir(full_path): |
|
360 | 298 | log.debug('path: %s is a valid repo group !', full_path) |
|
361 | 299 | return True |
|
362 | 300 | |
|
363 | 301 | log.debug('path: %s is not a valid repo group !', full_path) |
|
364 | 302 | return False |
|
365 | 303 | |
|
366 | 304 | |
|
367 | 305 | def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'): |
|
368 | 306 | while True: |
|
369 | 307 | ok = raw_input(prompt) |
|
370 | 308 | if ok.lower() in ('y', 'ye', 'yes'): |
|
371 | 309 | return True |
|
372 | 310 | if ok.lower() in ('n', 'no', 'nop', 'nope'): |
|
373 | 311 | return False |
|
374 | 312 | retries = retries - 1 |
|
375 | 313 | if retries < 0: |
|
376 | 314 | raise IOError |
|
377 | 315 | print(complaint) |
|
378 | 316 | |
|
379 | 317 | # propagated from mercurial documentation |
|
380 | 318 | ui_sections = [ |
|
381 | 319 | 'alias', 'auth', |
|
382 | 320 | 'decode/encode', 'defaults', |
|
383 | 321 | 'diff', 'email', |
|
384 | 322 | 'extensions', 'format', |
|
385 | 323 | 'merge-patterns', 'merge-tools', |
|
386 | 324 | 'hooks', 'http_proxy', |
|
387 | 325 | 'smtp', 'patch', |
|
388 | 326 | 'paths', 'profiling', |
|
389 | 327 | 'server', 'trusted', |
|
390 | 328 | 'ui', 'web', ] |
|
391 | 329 | |
|
392 | 330 | |
|
393 | 331 | def config_data_from_db(clear_session=True, repo=None): |
|
394 | 332 | """ |
|
395 | 333 | Read the configuration data from the database and return configuration |
|
396 | 334 | tuples. |
|
397 | 335 | """ |
|
398 | 336 | from rhodecode.model.settings import VcsSettingsModel |
|
399 | 337 | |
|
400 | 338 | config = [] |
|
401 | 339 | |
|
402 | 340 | sa = meta.Session() |
|
403 | 341 | settings_model = VcsSettingsModel(repo=repo, sa=sa) |
|
404 | 342 | |
|
405 | 343 | ui_settings = settings_model.get_ui_settings() |
|
406 | 344 | |
|
407 | 345 | for setting in ui_settings: |
|
408 | 346 | if setting.active: |
|
409 | 347 | log.debug( |
|
410 | 348 | 'settings ui from db: [%s] %s=%s', |
|
411 | 349 | setting.section, setting.key, setting.value) |
|
412 | 350 | config.append(( |
|
413 | 351 | safe_str(setting.section), safe_str(setting.key), |
|
414 | 352 | safe_str(setting.value))) |
|
415 | 353 | if setting.key == 'push_ssl': |
|
416 | 354 | # force set push_ssl requirement to False, rhodecode |
|
417 | 355 | # handles that |
|
418 | 356 | config.append(( |
|
419 | 357 | safe_str(setting.section), safe_str(setting.key), False)) |
|
420 | 358 | if clear_session: |
|
421 | 359 | meta.Session.remove() |
|
422 | 360 | |
|
423 | 361 | # TODO: mikhail: probably it makes no sense to re-read hooks information. |
|
424 | 362 | # It's already there and activated/deactivated |
|
425 | 363 | skip_entries = [] |
|
426 | 364 | enabled_hook_classes = get_enabled_hook_classes(ui_settings) |
|
427 | 365 | if 'pull' not in enabled_hook_classes: |
|
428 | 366 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL)) |
|
429 | 367 | if 'push' not in enabled_hook_classes: |
|
430 | 368 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH)) |
|
431 | 369 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH)) |
|
432 | 370 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY)) |
|
433 | 371 | |
|
434 | 372 | config = [entry for entry in config if entry[:2] not in skip_entries] |
|
435 | 373 | |
|
436 | 374 | return config |
|
437 | 375 | |
|
438 | 376 | |
|
439 | 377 | def make_db_config(clear_session=True, repo=None): |
|
440 | 378 | """ |
|
441 | 379 | Create a :class:`Config` instance based on the values in the database. |
|
442 | 380 | """ |
|
443 | 381 | config = Config() |
|
444 | 382 | config_data = config_data_from_db(clear_session=clear_session, repo=repo) |
|
445 | 383 | for section, option, value in config_data: |
|
446 | 384 | config.set(section, option, value) |
|
447 | 385 | return config |
|
448 | 386 | |
|
449 | 387 | |
|
450 | 388 | def get_enabled_hook_classes(ui_settings): |
|
451 | 389 | """ |
|
452 | 390 | Return the enabled hook classes. |
|
453 | 391 | |
|
454 | 392 | :param ui_settings: List of ui_settings as returned |
|
455 | 393 | by :meth:`VcsSettingsModel.get_ui_settings` |
|
456 | 394 | |
|
457 | 395 | :return: a list with the enabled hook classes. The order is not guaranteed. |
|
458 | 396 | :rtype: list |
|
459 | 397 | """ |
|
460 | 398 | enabled_hooks = [] |
|
461 | 399 | active_hook_keys = [ |
|
462 | 400 | key for section, key, value, active in ui_settings |
|
463 | 401 | if section == 'hooks' and active] |
|
464 | 402 | |
|
465 | 403 | hook_names = { |
|
466 | 404 | RhodeCodeUi.HOOK_PUSH: 'push', |
|
467 | 405 | RhodeCodeUi.HOOK_PULL: 'pull', |
|
468 | 406 | RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size' |
|
469 | 407 | } |
|
470 | 408 | |
|
471 | 409 | for key in active_hook_keys: |
|
472 | 410 | hook = hook_names.get(key) |
|
473 | 411 | if hook: |
|
474 | 412 | enabled_hooks.append(hook) |
|
475 | 413 | |
|
476 | 414 | return enabled_hooks |
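# Illustrative sketch (hypothetical ui_settings rows): each row unpacks as
# (section, key, value, active); only active rows in the 'hooks' section
# with one of the three mapped RhodeCodeUi keys are reported.
example_settings = [
    ('hooks', RhodeCodeUi.HOOK_PUSH, 'python:...', True),
    ('hooks', RhodeCodeUi.HOOK_PULL, 'python:...', False),  # inactive
    ('web', 'push_ssl', 'false', True),                     # wrong section
]
assert get_enabled_hook_classes(example_settings) == ['push']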
|
477 | 415 | |
|
478 | 416 | |
|
479 | 417 | def set_rhodecode_config(config): |
|
480 | 418 | """ |
|
481 | 419 | Updates pylons config with new settings from database |
|
482 | 420 | |
|
483 | 421 | :param config: |
|
484 | 422 | """ |
|
485 | 423 | from rhodecode.model.settings import SettingsModel |
|
486 | 424 | app_settings = SettingsModel().get_all_settings() |
|
487 | 425 | |
|
488 | 426 | for k, v in app_settings.items(): |
|
489 | 427 | config[k] = v |
|
490 | 428 | |
|
491 | 429 | |
|
492 | 430 | def get_rhodecode_realm(): |
|
493 | 431 | """ |
|
494 | 432 | Return the rhodecode realm from database. |
|
495 | 433 | """ |
|
496 | 434 | from rhodecode.model.settings import SettingsModel |
|
497 | 435 | realm = SettingsModel().get_setting_by_name('realm') |
|
498 | 436 | return safe_str(realm.app_settings_value) |
|
499 | 437 | |
|
500 | 438 | |
|
501 | 439 | def get_rhodecode_base_path(): |
|
502 | 440 | """ |
|
503 | 441 | Returns the base path. The base path is the filesystem path which points |
|
504 | 442 | to the repository store. |
|
505 | 443 | """ |
|
506 | 444 | from rhodecode.model.settings import SettingsModel |
|
507 | 445 | paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/') |
|
508 | 446 | return safe_str(paths_ui.ui_value) |
|
509 | 447 | |
|
510 | 448 | |
|
511 | 449 | def map_groups(path): |
|
512 | 450 | """ |
|
513 | 451 | Given a full path to a repository, create all nested groups that this |
|
514 | 452 | repo is inside. This function creates parent-child relationships between |
|
515 | 453 | groups and creates default perms for all new groups. |
|
516 | 454 | |
|
517 | 455 | :param path: full path to repository 
|
518 | 456 | """ |
|
519 | 457 | from rhodecode.model.repo_group import RepoGroupModel |
|
520 | 458 | sa = meta.Session() |
|
521 | 459 | groups = path.split(Repository.NAME_SEP) |
|
522 | 460 | parent = None |
|
523 | 461 | group = None |
|
524 | 462 | |
|
525 | 463 | # last element is repo in nested groups structure |
|
526 | 464 | groups = groups[:-1] |
|
527 | 465 | rgm = RepoGroupModel(sa) |
|
528 | 466 | owner = User.get_first_super_admin() |
|
529 | 467 | for lvl, group_name in enumerate(groups): |
|
530 | 468 | group_name = '/'.join(groups[:lvl] + [group_name]) |
|
531 | 469 | group = RepoGroup.get_by_group_name(group_name) |
|
532 | 470 | desc = '%s group' % group_name |
|
533 | 471 | |
|
534 | 472 | # skip folders that are now removed repos |
|
535 | 473 | if REMOVED_REPO_PAT.match(group_name): |
|
536 | 474 | break |
|
537 | 475 | |
|
538 | 476 | if group is None: |
|
539 | 477 | log.debug('creating group level: %s group_name: %s', |
|
540 | 478 | lvl, group_name) |
|
541 | 479 | group = RepoGroup(group_name, parent) |
|
542 | 480 | group.group_description = desc |
|
543 | 481 | group.user = owner |
|
544 | 482 | sa.add(group) |
|
545 | 483 | perm_obj = rgm._create_default_perms(group) |
|
546 | 484 | sa.add(perm_obj) |
|
547 | 485 | sa.flush() |
|
548 | 486 | |
|
549 | 487 | parent = group |
|
550 | 488 | return group |
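# Illustrative sketch (hypothetical path; needs a db session and assumes
# Repository.NAME_SEP == '/'): for a repo at 'group/subgroup/repo' this
# creates 'group' and 'group/subgroup' as needed and returns the deepest
# group; the trailing repo segment itself never becomes a group.
deepest = map_groups('group/subgroup/repo')
assert deepest.group_name == 'group/subgroup'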
|
551 | 489 | |
|
552 | 490 | |
|
553 | 491 | def repo2db_mapper(initial_repo_list, remove_obsolete=False): |
|
554 | 492 | """ |
|
555 | 493 | Maps all repos given in initial_repo_list; non-existing repositories 

556 | 494 | are created. If remove_obsolete is True it also checks for db entries 
|
557 | 495 | that are not in initial_repo_list and removes them. |
|
558 | 496 | |
|
559 | 497 | :param initial_repo_list: list of repositories found by scanning methods |
|
560 | 498 | :param remove_obsolete: check for obsolete entries in database |
|
561 | 499 | """ |
|
562 | 500 | from rhodecode.model.repo import RepoModel |
|
563 | 501 | from rhodecode.model.scm import ScmModel |
|
564 | 502 | from rhodecode.model.repo_group import RepoGroupModel |
|
565 | 503 | from rhodecode.model.settings import SettingsModel |
|
566 | 504 | |
|
567 | 505 | sa = meta.Session() |
|
568 | 506 | repo_model = RepoModel() |
|
569 | 507 | user = User.get_first_super_admin() |
|
570 | 508 | added = [] |
|
571 | 509 | |
|
572 | 510 | # creation defaults |
|
573 | 511 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
574 | 512 | enable_statistics = defs.get('repo_enable_statistics') |
|
575 | 513 | enable_locking = defs.get('repo_enable_locking') |
|
576 | 514 | enable_downloads = defs.get('repo_enable_downloads') |
|
577 | 515 | private = defs.get('repo_private') |
|
578 | 516 | |
|
579 | 517 | for name, repo in initial_repo_list.items(): |
|
580 | 518 | group = map_groups(name) |
|
581 | 519 | unicode_name = safe_unicode(name) |
|
582 | 520 | db_repo = repo_model.get_by_repo_name(unicode_name) |
|
583 | 521 | # found a repo that is on the filesystem but not in the RhodeCode database 
|
584 | 522 | if not db_repo: |
|
585 | 523 | log.info('repository %s not found, creating now', name) |
|
586 | 524 | added.append(name) |
|
587 | 525 | desc = (repo.description |
|
588 | 526 | if repo.description != 'unknown' |
|
589 | 527 | else '%s repository' % name) |
|
590 | 528 | |
|
591 | 529 | db_repo = repo_model._create_repo( |
|
592 | 530 | repo_name=name, |
|
593 | 531 | repo_type=repo.alias, |
|
594 | 532 | description=desc, |
|
595 | 533 | repo_group=getattr(group, 'group_id', None), |
|
596 | 534 | owner=user, |
|
597 | 535 | enable_locking=enable_locking, |
|
598 | 536 | enable_downloads=enable_downloads, |
|
599 | 537 | enable_statistics=enable_statistics, |
|
600 | 538 | private=private, |
|
601 | 539 | state=Repository.STATE_CREATED |
|
602 | 540 | ) |
|
603 | 541 | sa.commit() |
|
604 | 542 | # we just added that repo, so make sure we update the server info 
|
605 | 543 | if db_repo.repo_type == 'git': |
|
606 | 544 | git_repo = db_repo.scm_instance() |
|
607 | 545 | # update repository server-info |
|
608 | 546 | log.debug('Running update server info') |
|
609 | 547 | git_repo._update_server_info() |
|
610 | 548 | |
|
611 | 549 | db_repo.update_commit_cache() |
|
612 | 550 | |
|
613 | 551 | config = db_repo._config |
|
614 | 552 | config.set('extensions', 'largefiles', '') |
|
615 | 553 | ScmModel().install_hooks( |
|
616 | 554 | db_repo.scm_instance(config=config), |
|
617 | 555 | repo_type=db_repo.repo_type) |
|
618 | 556 | |
|
619 | 557 | removed = [] |
|
620 | 558 | if remove_obsolete: |
|
621 | 559 | # remove from database those repositories that are not in the filesystem |
|
622 | 560 | for repo in sa.query(Repository).all(): |
|
623 | 561 | if repo.repo_name not in initial_repo_list.keys(): |
|
624 | 562 | log.debug("Removing non-existing repository found in db `%s`", |
|
625 | 563 | repo.repo_name) |
|
626 | 564 | try: |
|
627 | 565 | RepoModel(sa).delete(repo, forks='detach', fs_remove=False) |
|
628 | 566 | sa.commit() |
|
629 | 567 | removed.append(repo.repo_name) |
|
630 | 568 | except Exception: |
|
631 | 569 | # don't hold further removals on error |
|
632 | 570 | log.error(traceback.format_exc()) |
|
633 | 571 | sa.rollback() |
|
634 | 572 | |
|
635 | 573 | def splitter(full_repo_name): |
|
636 | 574 | _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1) |
|
637 | 575 | gr_name = None |
|
638 | 576 | if len(_parts) == 2: |
|
639 | 577 | gr_name = _parts[0] |
|
640 | 578 | return gr_name |
|
641 | 579 | |
|
642 | 580 | initial_repo_group_list = [splitter(x) for x in |
|
643 | 581 | initial_repo_list.keys() if splitter(x)] |
|
644 | 582 | |
|
645 | 583 | # remove from database those repository groups that are not on the 

646 | 584 | # filesystem; due to parent-child relationships we need to delete them 

647 | 585 | # in a specific order, most nested first 
|
648 | 586 | all_groups = [x.group_name for x in sa.query(RepoGroup).all()] |
|
649 | 587 | nested_sort = lambda gr: len(gr.split('/')) |
|
650 | 588 | for group_name in sorted(all_groups, key=nested_sort, reverse=True): |
|
651 | 589 | if group_name not in initial_repo_group_list: |
|
652 | 590 | repo_group = RepoGroup.get_by_group_name(group_name) |
|
653 | 591 | if (repo_group.children.all() or |
|
654 | 592 | not RepoGroupModel().check_exist_filesystem( |
|
655 | 593 | group_name=group_name, exc_on_failure=False)): |
|
656 | 594 | continue |
|
657 | 595 | |
|
658 | 596 | log.info( |
|
659 | 597 | 'Removing non-existing repository group found in db `%s`', |
|
660 | 598 | group_name) |
|
661 | 599 | try: |
|
662 | 600 | RepoGroupModel(sa).delete(group_name, fs_remove=False) |
|
663 | 601 | sa.commit() |
|
664 | 602 | removed.append(group_name) |
|
665 | 603 | except Exception: |
|
666 | 604 | # don't hold further removals on error |
|
667 | 605 | log.exception( |
|
668 | 606 | 'Unable to remove repository group `%s`', |
|
669 | 607 | group_name) |
|
670 | 608 | sa.rollback() |
|
671 | 609 | raise |
|
672 | 610 | |
|
673 | 611 | return added, removed |
|
674 | 612 | |
|
675 | 613 | |
|
676 | 614 | def get_default_cache_settings(settings): |
|
677 | 615 | cache_settings = {} |
|
678 | 616 | for key in settings.keys(): |
|
679 | 617 | for prefix in ['beaker.cache.', 'cache.']: |
|
680 | 618 | if key.startswith(prefix): |
|
681 | 619 | name = key.split(prefix)[1].strip() |
|
682 | 620 | cache_settings[name] = settings[key].strip() |
|
683 | 621 | return cache_settings |
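# Illustrative sketch: both the 'beaker.cache.' and the bare 'cache.'
# prefixes are stripped, so these hypothetical .ini keys land under one
# plain name each.
example = {
    'beaker.cache.sql_cache_short.expire': '30',
    'cache.regions': 'sql_cache_short',
    'app.secret': 'xyz',  # no cache prefix -> ignored
}
assert get_default_cache_settings(example) == {
    'sql_cache_short.expire': '30',
    'regions': 'sql_cache_short',
}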
|
684 | 622 | |
|
685 | 623 | |
|
686 | 624 | # set cache regions for beaker so celery can utilise it |
|
687 | 625 | def add_cache(settings): |
|
688 | 626 | from rhodecode.lib import caches |
|
689 | 627 | cache_settings = {'regions': None} |
|
690 | 628 | # main cache settings used as default ... |
|
691 | 629 | cache_settings.update(get_default_cache_settings(settings)) |
|
692 | 630 | |
|
693 | 631 | if cache_settings['regions']: |
|
694 | 632 | for region in cache_settings['regions'].split(','): |
|
695 | 633 | region = region.strip() |
|
696 | 634 | region_settings = {} |
|
697 | 635 | for key, value in cache_settings.items(): |
|
698 | 636 | if key.startswith(region): |
|
699 | 637 | region_settings[key.split('.')[1]] = value |
|
700 | 638 | |
|
701 | 639 | caches.configure_cache_region( |
|
702 | 640 | region, region_settings, cache_settings) |
|
703 | 641 | |
|
704 | 642 | |
|
705 | 643 | def load_rcextensions(root_path): |
|
706 | 644 | import rhodecode |
|
707 | 645 | from rhodecode.config import conf |
|
708 | 646 | |
|
709 | 647 | path = os.path.join(root_path, 'rcextensions', '__init__.py') |
|
710 | 648 | if os.path.isfile(path): |
|
711 | 649 | rcext = create_module('rc', path) |
|
712 | 650 | EXT = rhodecode.EXTENSIONS = rcext |
|
713 | 651 | log.debug('Found rcextensions now loading %s...', rcext) |
|
714 | 652 | |
|
715 | 653 | # Additional mappings that are not present in the pygments lexers |
|
716 | 654 | conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {})) |
|
717 | 655 | |
|
718 | 656 | # auto-check if the module is not missing any data; set to default if it is. 

719 | 657 | # this will help auto-update new features of the rcextensions module 
|
720 | 658 | #from rhodecode.config import rcextensions |
|
721 | 659 | #for k in dir(rcextensions): |
|
722 | 660 | # if not k.startswith('_') and not hasattr(EXT, k): |
|
723 | 661 | # setattr(EXT, k, getattr(rcextensions, k)) |
|
724 | 662 | |
|
725 | 663 | |
|
726 | 664 | def get_custom_lexer(extension): |
|
727 | 665 | """ |
|
728 | 666 | returns a custom lexer if it is defined in rcextensions module, or None |
|
729 | 667 | if there's no custom lexer defined |
|
730 | 668 | """ |
|
731 | 669 | import rhodecode |
|
732 | 670 | from pygments import lexers |
|
733 | 671 | |
|
734 | 672 | # custom override made by RhodeCode |
|
735 | 673 | if extension in ['mako']: |
|
736 | 674 | return lexers.get_lexer_by_name('html+mako') |
|
737 | 675 | |
|
738 | 676 | # check if this extension is mapped to another lexer 
|
739 | 677 | extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None) |
|
740 | 678 | if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS: |
|
741 | 679 | _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension] |
|
742 | 680 | return lexers.get_lexer_by_name(_lexer_name) |
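# Illustrative sketch (hypothetical rcextensions config): with
# EXTRA_LEXERS = {'jinja': 'html+django'} defined in rcextensions,
# lookups resolve as follows; unmapped extensions fall through to None.
get_custom_lexer('mako')   # built-in override -> 'html+mako' lexer
get_custom_lexer('jinja')  # via EXTRA_LEXERS -> 'html+django' lexer
get_custom_lexer('xyz')    # no mapping -> returns None implicitly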
|
743 | 681 | |
|
744 | 682 | |
|
745 | 683 | #============================================================================== |
|
746 | 684 | # TEST FUNCTIONS AND CREATORS |
|
747 | 685 | #============================================================================== |
|
748 | 686 | def create_test_index(repo_location, config): |
|
749 | 687 | """ |
|
750 | 688 | Makes default test index. |
|
751 | 689 | """ |
|
752 | 690 | import rc_testdata |
|
753 | 691 | |
|
754 | 692 | rc_testdata.extract_search_index( |
|
755 | 693 | 'vcs_search_index', os.path.dirname(config['search.location'])) |
|
756 | 694 | |
|
757 | 695 | |
|
758 | 696 | def create_test_directory(test_path): |
|
759 | 697 | """ |
|
760 | 698 | Create test directory if it doesn't exist. |
|
761 | 699 | """ |
|
762 | 700 | if not os.path.isdir(test_path): |
|
763 | 701 | log.debug('Creating testdir %s', test_path) |
|
764 | 702 | os.makedirs(test_path) |
|
765 | 703 | |
|
766 | 704 | |
|
767 | 705 | def create_test_database(test_path, config): |
|
768 | 706 | """ |
|
769 | 707 | Makes a fresh database. |
|
770 | 708 | """ |
|
771 | 709 | from rhodecode.lib.db_manage import DbManage |
|
772 | 710 | |
|
773 | 711 | # PART ONE create db |
|
774 | 712 | dbconf = config['sqlalchemy.db1.url'] |
|
775 | 713 | log.debug('making test db %s', dbconf) |
|
776 | 714 | |
|
777 | 715 | dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'], |
|
778 | 716 | tests=True, cli_args={'force_ask': True}) |
|
779 | 717 | dbmanage.create_tables(override=True) |
|
780 | 718 | dbmanage.set_db_version() |
|
781 | 719 | # for tests dynamically set new root paths based on generated content |
|
782 | 720 | dbmanage.create_settings(dbmanage.config_prompt(test_path)) |
|
783 | 721 | dbmanage.create_default_user() |
|
784 | 722 | dbmanage.create_test_admin_and_users() |
|
785 | 723 | dbmanage.create_permissions() |
|
786 | 724 | dbmanage.populate_default_permissions() |
|
787 | 725 | Session().commit() |
|
788 | 726 | |
|
789 | 727 | |
|
790 | 728 | def create_test_repositories(test_path, config): |
|
791 | 729 | """ |
|
792 | 730 | Creates test repositories in the temporary directory. Repositories are |
|
793 | 731 | extracted from archives within the rc_testdata package. |
|
794 | 732 | """ |
|
795 | 733 | import rc_testdata |
|
796 | 734 | from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO |
|
797 | 735 | |
|
798 | 736 | log.debug('making test vcs repositories') |
|
799 | 737 | |
|
800 | 738 | idx_path = config['search.location'] |
|
801 | 739 | data_path = config['cache_dir'] |
|
802 | 740 | |
|
803 | 741 | # clean index and data |
|
804 | 742 | if idx_path and os.path.exists(idx_path): |
|
805 | 743 | log.debug('remove %s', idx_path) |
|
806 | 744 | shutil.rmtree(idx_path) |
|
807 | 745 | |
|
808 | 746 | if data_path and os.path.exists(data_path): |
|
809 | 747 | log.debug('remove %s', data_path) |
|
810 | 748 | shutil.rmtree(data_path) |
|
811 | 749 | |
|
812 | 750 | rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO)) |
|
813 | 751 | rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO)) |
|
814 | 752 | |
|
815 | 753 | # Note: Subversion is in the process of being integrated with the system; 

816 | 754 | # until we have a properly packed version of the test svn repository, this 

817 | 755 | # tries to copy over the repo from the "rc_testdata" package 
|
818 | 756 | svn_repo_path = rc_testdata.get_svn_repo_archive() |
|
819 | 757 | with tarfile.open(svn_repo_path) as tar: |
|
820 | 758 | tar.extractall(jn(test_path, SVN_REPO)) |
|
821 | 759 | |
|
822 | 760 | |
|
823 | 761 | #============================================================================== |
|
824 | 762 | # PASTER COMMANDS |
|
825 | 763 | #============================================================================== |
|
826 | 764 | class BasePasterCommand(Command): |
|
827 | 765 | """ |
|
828 | 766 | Abstract Base Class for paster commands. |
|
829 | 767 | |
|
830 | 768 | The celery commands are somewhat aggressive about loading |
|
831 | 769 | celery.conf, and since our module sets the `CELERY_LOADER` |
|
832 | 770 | environment variable to our loader, we have to bootstrap a bit and |
|
833 | 771 | make sure we've had a chance to load the pylons config off of the |
|
834 | 772 | command line, otherwise everything fails. |
|
835 | 773 | """ |
|
836 | 774 | min_args = 1 |
|
837 | 775 | min_args_error = "Please provide a paster config file as an argument." |
|
838 | 776 | takes_config_file = 1 |
|
839 | 777 | requires_config_file = True |
|
840 | 778 | |
|
841 | 779 | def notify_msg(self, msg, log=False): |
|
842 | 780 | """Make a notification to user, additionally if logger is passed |
|
843 | 781 | it logs this action using given logger |
|
844 | 782 | |
|
845 | 783 | :param msg: message that will be printed to user |
|
846 | 784 | :param log: logging instance, to use to additionally log this message |
|
847 | 785 | |
|
848 | 786 | """ |
|
849 | 787 | if log and isinstance(log, logging.Logger): 

850 | 788 | log.info(msg) 
|
851 | 789 | |
|
852 | 790 | def run(self, args): |
|
853 | 791 | """ |
|
854 | 792 | Overrides Command.run |
|
855 | 793 | |
|
856 | 794 | Checks for a config file argument and loads it. |
|
857 | 795 | """ |
|
858 | 796 | if len(args) < self.min_args: |
|
859 | 797 | raise BadCommand( |
|
860 | 798 | self.min_args_error % {'min_args': self.min_args, |
|
861 | 799 | 'actual_args': len(args)}) |
|
862 | 800 | |
|
863 | 801 | # Decrement because we're going to lob off the first argument. |
|
864 | 802 | # @@ This is hacky |
|
865 | 803 | self.min_args -= 1 |
|
866 | 804 | self.bootstrap_config(args[0]) |
|
867 | 805 | self.update_parser() |
|
868 | 806 | return super(BasePasterCommand, self).run(args[1:]) |
|
869 | 807 | |
|
870 | 808 | def update_parser(self): |
|
871 | 809 | """ |
|
872 | 810 | Abstract method. Allows for the class' parser to be updated |
|
873 | 811 | before the superclass' `run` method is called. Necessary to |
|
874 | 812 | allow options/arguments to be passed through to the underlying |
|
875 | 813 | celery command. |
|
876 | 814 | """ |
|
877 | 815 | raise NotImplementedError("Abstract Method.") |
|
878 | 816 | |
|
879 | 817 | def bootstrap_config(self, conf): |
|
880 | 818 | """ |
|
881 | 819 | Loads the pylons configuration. |
|
882 | 820 | """ |
|
883 | 821 | from pylons import config as pylonsconfig |
|
884 | 822 | |
|
885 | 823 | self.path_to_ini_file = os.path.realpath(conf) |
|
886 | 824 | conf = paste.deploy.appconfig('config:' + self.path_to_ini_file) |
|
887 | 825 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) |
|
888 | 826 | |
|
889 | 827 | def _init_session(self): |
|
890 | 828 | """ |
|
891 | 829 | Inits SqlAlchemy Session |
|
892 | 830 | """ |
|
893 | 831 | logging.config.fileConfig(self.path_to_ini_file) |
|
894 | 832 | from pylons import config |
|
895 | 833 | from rhodecode.config.utils import initialize_database |
|
896 | 834 | |
|
897 | 835 | # get to remove repos !! |
|
898 | 836 | add_cache(config) |
|
899 | 837 | initialize_database(config) |
|
900 | 838 | |
|
901 | 839 | |
|
902 | 840 | @decorator.decorator |
|
903 | 841 | def jsonify(func, *args, **kwargs): |
|
904 | 842 | """Action decorator that formats output for JSON |
|
905 | 843 | |
|
906 | 844 | Given a function that will return content, this decorator will turn |
|
907 | 845 | the result into JSON, with a content-type of 'application/json' and |
|
908 | 846 | output it. |
|
909 | 847 | |
|
910 | 848 | """ |
|
911 | 849 | from pylons.decorators.util import get_pylons |
|
912 | 850 | from rhodecode.lib.ext_json import json |
|
913 | 851 | pylons = get_pylons(args) |
|
914 | 852 | pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8' |
|
915 | 853 | data = func(*args, **kwargs) |
|
916 | 854 | if isinstance(data, (list, tuple)): |
|
917 | 855 | msg = "JSON responses with Array envelopes are susceptible to " \ |
|
918 | 856 | "cross-site data leak attacks, see " \ |
|
919 | 857 | "http://wiki.pylonshq.com/display/pylonsfaq/Warnings" |
|
920 | 858 | warnings.warn(msg, Warning, 2) |
|
921 | 859 | log.warning(msg) |
|
922 | 860 | log.debug("Returning JSON wrapped action output") |
|
923 | 861 | return json.dumps(data, encoding='utf-8') |
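# Illustrative sketch (hypothetical pylons controller): returning a dict
# keeps the envelope safe; a list return would trigger the warning above.
class ExampleController(object):
    @jsonify
    def items(self):
        return {'items': [1, 2, 3]}  # serialized with a JSON content-type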
|
924 | 862 | |
|
925 | 863 | |
|
926 | 864 | class PartialRenderer(object): |
|
927 | 865 | """ |
|
928 | 866 | Partial renderer used to render chunks of html used in datagrids |
|
929 | 867 | use like:: |
|
930 | 868 | |
|
931 | 869 | _render = PartialRenderer('data_table/_dt_elements.mako') |
|
932 | 870 | _render('quick_menu', args, kwargs) |
|
933 | 871 | PartialRenderer.h, |
|
934 | 872 | c, |
|
935 | 873 | _, |
|
936 | 874 | ungettext |
|
937 | 875 | are the template helpers initialized inside and can be re-used later 
|
938 | 876 | |
|
939 | 877 | :param tmpl_name: template path relative to /templates/ dir 
|
940 | 878 | """ |
|
941 | 879 | |
|
942 | 880 | def __init__(self, tmpl_name): |
|
943 | 881 | import rhodecode |
|
944 | 882 | from pylons import request, tmpl_context as c |
|
945 | 883 | from pylons.i18n.translation import _, ungettext |
|
946 | 884 | from rhodecode.lib import helpers as h |
|
947 | 885 | |
|
948 | 886 | self.tmpl_name = tmpl_name |
|
949 | 887 | self.rhodecode = rhodecode |
|
950 | 888 | self.c = c |
|
951 | 889 | self._ = _ |
|
952 | 890 | self.ungettext = ungettext |
|
953 | 891 | self.h = h |
|
954 | 892 | self.request = request |
|
955 | 893 | |
|
956 | 894 | def _mako_lookup(self): |
|
957 | 895 | _tmpl_lookup = self.rhodecode.CONFIG['pylons.app_globals'].mako_lookup |
|
958 | 896 | return _tmpl_lookup.get_template(self.tmpl_name) |
|
959 | 897 | |
|
960 | 898 | def _update_kwargs_for_render(self, kwargs): |
|
961 | 899 | """ |
|
962 | 900 | Inject params required for Mako rendering |
|
963 | 901 | """ |
|
964 | 902 | _kwargs = { |
|
965 | 903 | '_': self._, |
|
966 | 904 | 'h': self.h, |
|
967 | 905 | 'c': self.c, |
|
968 | 906 | 'request': self.request, |
|
969 | 907 | 'ungettext': self.ungettext, |
|
970 | 908 | } |
|
971 | 909 | _kwargs.update(kwargs) |
|
972 | 910 | return _kwargs |
|
973 | 911 | |
|
974 | 912 | def _render_with_exc(self, render_func, args, kwargs): |
|
975 | 913 | try: |
|
976 | 914 | return render_func.render(*args, **kwargs) |
|
977 | 915 | except: |
|
978 | 916 | log.error(exceptions.text_error_template().render()) |
|
979 | 917 | raise |
|
980 | 918 | |
|
981 | 919 | def _get_template(self, template_obj, def_name): |
|
982 | 920 | if def_name: |
|
983 | 921 | tmpl = template_obj.get_def(def_name) |
|
984 | 922 | else: |
|
985 | 923 | tmpl = template_obj |
|
986 | 924 | return tmpl |
|
987 | 925 | |
|
988 | 926 | def render(self, def_name, *args, **kwargs): |
|
989 | 927 | lookup_obj = self._mako_lookup() |
|
990 | 928 | tmpl = self._get_template(lookup_obj, def_name=def_name) |
|
991 | 929 | kwargs = self._update_kwargs_for_render(kwargs) |
|
992 | 930 | return self._render_with_exc(tmpl, args, kwargs) |
|
993 | 931 | |
|
994 | 932 | def __call__(self, tmpl, *args, **kwargs): |
|
995 | 933 | return self.render(tmpl, *args, **kwargs) |
|
996 | 934 | |
|
997 | 935 | |
|
998 | 936 | def password_changed(auth_user, session): |
|
999 | 937 | # Never report password change in case of default user or anonymous user. |
|
1000 | 938 | if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None: |
|
1001 | 939 | return False |
|
1002 | 940 | |
|
1003 | 941 | password_hash = md5(auth_user.password) if auth_user.password else None |
|
1004 | 942 | rhodecode_user = session.get('rhodecode_user', {}) |
|
1005 | 943 | session_password_hash = rhodecode_user.get('password', '') |
|
1006 | 944 | return password_hash != session_password_hash |
|
1007 | 945 | |
|
1008 | 946 | |
|
1009 | 947 | def read_opensource_licenses(): |
|
1010 | 948 | global _license_cache |
|
1011 | 949 | |
|
1012 | 950 | if not _license_cache: |
|
1013 | 951 | licenses = pkg_resources.resource_string( |
|
1014 | 952 | 'rhodecode', 'config/licenses.json') |
|
1015 | 953 | _license_cache = json.loads(licenses) |
|
1016 | 954 | |
|
1017 | 955 | return _license_cache |
|
1018 | 956 | |
|
1019 | 957 | |
|
1020 | 958 | def get_registry(request): |
|
1021 | 959 | """ |
|
1022 | 960 | Utility to get the pyramid registry from a request. During migration to |
|
1023 | 961 | pyramid we sometimes want to use the pyramid registry from pylons context. |
|
1024 | 962 | Therefore this utility returns `request.registry` for pyramid requests and |
|
1025 | 963 | uses `get_current_registry()` for pylons requests. |
|
1026 | 964 | """ |
|
1027 | 965 | try: |
|
1028 | 966 | return request.registry |
|
1029 | 967 | except AttributeError: |
|
1030 | 968 | return get_current_registry() |
|
1031 | 969 | |
|
1032 | 970 | |
|
1033 | 971 | def generate_platform_uuid(): |
|
1034 | 972 | """ |
|
1035 | 973 | Generates platform UUID based on its name 
|
1036 | 974 | """ |
|
1037 | 975 | import platform |
|
1038 | 976 | |
|
1039 | 977 | try: |
|
1040 | 978 | uuid_list = [platform.platform()] |
|
1041 | 979 | return hashlib.sha256(':'.join(uuid_list)).hexdigest() |
|
1042 | 980 | except Exception as e: |
|
1043 | 981 | log.error('Failed to generate host uuid: %s' % e) |
|
1044 | 982 | return 'UNDEFINED' |
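# Illustrative sketch: the uuid is a SHA-256 over platform.platform() only,
# so it is deterministic for a given host/OS build.
assert generate_platform_uuid() == generate_platform_uuid()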
@@ -1,650 +1,666 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | comments model for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import traceback |
|
27 | 27 | import collections |
|
28 | 28 | |
|
29 | 29 | from datetime import datetime |
|
30 | 30 | |
|
31 | 31 | from pylons.i18n.translation import _ |
|
32 | 32 | from pyramid.threadlocal import get_current_registry, get_current_request |
|
33 | 33 | from sqlalchemy.sql.expression import null |
|
34 | 34 | from sqlalchemy.sql.functions import coalesce |
|
35 | 35 | |
|
36 | 36 | from rhodecode.lib import helpers as h, diffs |
|
37 | from rhodecode.lib import audit_logger | |
|
37 | 38 | from rhodecode.lib.channelstream import channelstream_request |
|
38 | from rhodecode.lib.utils import action_logger | |
|
39 | 39 | from rhodecode.lib.utils2 import extract_mentioned_users, safe_str |
|
40 | 40 | from rhodecode.model import BaseModel |
|
41 | 41 | from rhodecode.model.db import ( |
|
42 | 42 | ChangesetComment, User, Notification, PullRequest, AttributeDict) |
|
43 | 43 | from rhodecode.model.notification import NotificationModel |
|
44 | 44 | from rhodecode.model.meta import Session |
|
45 | 45 | from rhodecode.model.settings import VcsSettingsModel |
|
46 | 46 | from rhodecode.model.notification import EmailNotificationModel |
|
47 | 47 | from rhodecode.model.validation_schema.schemas import comment_schema |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | log = logging.getLogger(__name__) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class CommentsModel(BaseModel): |
|
54 | 54 | |
|
55 | 55 | cls = ChangesetComment |
|
56 | 56 | |
|
57 | 57 | DIFF_CONTEXT_BEFORE = 3 |
|
58 | 58 | DIFF_CONTEXT_AFTER = 3 |
|
59 | 59 | |
|
60 | 60 | def __get_commit_comment(self, changeset_comment): |
|
61 | 61 | return self._get_instance(ChangesetComment, changeset_comment) |
|
62 | 62 | |
|
63 | 63 | def __get_pull_request(self, pull_request): |
|
64 | 64 | return self._get_instance(PullRequest, pull_request) |
|
65 | 65 | |
|
66 | 66 | def _extract_mentions(self, s): |
|
67 | 67 | user_objects = [] |
|
68 | 68 | for username in extract_mentioned_users(s): |
|
69 | 69 | user_obj = User.get_by_username(username, case_insensitive=True) |
|
70 | 70 | if user_obj: |
|
71 | 71 | user_objects.append(user_obj) |
|
72 | 72 | return user_objects |
|
73 | 73 | |
|
74 | 74 | def _get_renderer(self, global_renderer='rst'): |
|
75 | 75 | try: |
|
76 | 76 | # try reading from visual context |
|
77 | 77 | from pylons import tmpl_context |
|
78 | 78 | global_renderer = tmpl_context.visual.default_renderer |
|
79 | 79 | except AttributeError: |
|
80 | 80 | log.debug("Renderer not set, falling back " |
|
81 | 81 | "to default renderer '%s'", global_renderer) |
|
82 | 82 | except Exception: |
|
83 | 83 | log.error(traceback.format_exc()) |
|
84 | 84 | return global_renderer |
|
85 | 85 | |
|
86 | 86 | def aggregate_comments(self, comments, versions, show_version, inline=False): |
|
87 | 87 | # group by versions, and collect the cumulative ('until') and display objects 
|
88 | 88 | |
|
89 | 89 | comment_groups = collections.defaultdict(list) |
|
90 | 90 | for _co in comments: 

91 | 91 | comment_groups[_co.pull_request_version_id].append(_co) 
|
92 | 92 | |
|
93 | 93 | def yield_comments(pos): |
|
94 | 94 | for co in comment_groups[pos]: |
|
95 | 95 | yield co |
|
96 | 96 | |
|
97 | 97 | comment_versions = collections.defaultdict( |
|
98 | 98 | lambda: collections.defaultdict(list)) |
|
99 | 99 | prev_prvid = -1 |
|
100 | 100 | # fake last entry with None, to aggregate on "latest" version which |
|
101 | 101 | # doesn't have a pull_request_version_id 
|
102 | 102 | for ver in versions + [AttributeDict({'pull_request_version_id': None})]: |
|
103 | 103 | prvid = ver.pull_request_version_id |
|
104 | 104 | if prev_prvid == -1: |
|
105 | 105 | prev_prvid = prvid |
|
106 | 106 | |
|
107 | 107 | for co in yield_comments(prvid): |
|
108 | 108 | comment_versions[prvid]['at'].append(co) |
|
109 | 109 | |
|
110 | 110 | # save until |
|
111 | 111 | current = comment_versions[prvid]['at'] |
|
112 | 112 | prev_until = comment_versions[prev_prvid]['until'] |
|
113 | 113 | cur_until = prev_until + current |
|
114 | 114 | comment_versions[prvid]['until'].extend(cur_until) |
|
115 | 115 | |
|
116 | 116 | # save outdated |
|
117 | 117 | if inline: |
|
118 | 118 | outdated = [x for x in cur_until |
|
119 | 119 | if x.outdated_at_version(show_version)] |
|
120 | 120 | else: |
|
121 | 121 | outdated = [x for x in cur_until |
|
122 | 122 | if x.older_than_version(show_version)] |
|
123 | 123 | display = [x for x in cur_until if x not in outdated] |
|
124 | 124 | |
|
125 | 125 | comment_versions[prvid]['outdated'] = outdated |
|
126 | 126 | comment_versions[prvid]['display'] = display |
|
127 | 127 | |
|
128 | 128 | prev_prvid = prvid |
|
129 | 129 | |
|
130 | 130 | return comment_versions |
|
131 | 131 | |
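A minimal standalone sketch of the 'at'/'until' bookkeeping performed above, using bare comment ids instead of ChangesetComment objects (versions iterate oldest first, with None standing in for the live pull request):

    import collections

    comment_groups = collections.defaultdict(list)
    for comment_id, version_id in [(1, 10), (2, 10), (3, None)]:
        comment_groups[version_id].append(comment_id)  # same grouping as above

    until = []
    for version_id in [10, None]:        # oldest version first, live (None) last
        at = comment_groups[version_id]  # comments made exactly at this version
        until = until + at               # everything visible up to this version
        print(version_id, 'at:', at, 'until:', until)
    # 10   at: [1, 2]  until: [1, 2]
    # None at: [3]     until: [1, 2, 3]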
|
132 | 132 | def get_unresolved_todos(self, pull_request, show_outdated=True): |
|
133 | 133 | |
|
134 | 134 | todos = Session().query(ChangesetComment) \ |
|
135 | 135 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
136 | 136 | .filter(ChangesetComment.resolved_by == None) \ |
|
137 | 137 | .filter(ChangesetComment.comment_type |
|
138 | 138 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
139 | 139 | |
|
140 | 140 | if not show_outdated: |
|
141 | 141 | todos = todos.filter( |
|
142 | 142 | coalesce(ChangesetComment.display_state, '') != |
|
143 | 143 | ChangesetComment.COMMENT_OUTDATED) |
|
144 | 144 | |
|
145 | 145 | todos = todos.all() |
|
146 | 146 | |
|
147 | 147 | return todos |
|
148 | 148 | |
|
149 | 149 | def get_commit_unresolved_todos(self, commit_id, show_outdated=True): |
|
150 | 150 | |
|
151 | 151 | todos = Session().query(ChangesetComment) \ |
|
152 | 152 | .filter(ChangesetComment.revision == commit_id) \ |
|
153 | 153 | .filter(ChangesetComment.resolved_by == None) \ |
|
154 | 154 | .filter(ChangesetComment.comment_type |
|
155 | 155 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
156 | 156 | |
|
157 | 157 | if not show_outdated: |
|
158 | 158 | todos = todos.filter( |
|
159 | 159 | coalesce(ChangesetComment.display_state, '') != |
|
160 | 160 | ChangesetComment.COMMENT_OUTDATED) |
|
161 | 161 | |
|
162 | 162 | todos = todos.all() |
|
163 | 163 | |
|
164 | 164 | return todos |
|
165 | 165 | |
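Both TODO queries above share the same shape; a hedged usage sketch, assuming a configured SQLAlchemy Session and an existing pull request object:

    todos = CommentsModel().get_unresolved_todos(pull_request, show_outdated=False)
    for todo in todos:
        print(todo.comment_id, todo.f_path, todo.line_no)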
|
166 | def _log_audit_action(self, action, action_data, user, comment): | |
|
167 | audit_logger.store( | |
|
168 | action=action, | |
|
169 | action_data=action_data, | |
|
170 | user=user, | |
|
171 | repo=comment.repo) | |
|
172 | ||
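For orientation, the legacy action_logger journal strings removed in this diff map onto the new audit actions roughly as follows (a sketch inferred from the hunks in this file; the '<id>'/'<rev>' placeholders are illustrative, not literal values):

    # old action_logger string            -> new audit_logger action
    LEGACY_TO_AUDIT = {
        'user_commented_pull_request:<id>': 'repo.pull_request.comment.create',
        'user_commented_revision:<rev>':    'repo.commit.comment.create',
    }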
|
166 | 173 | def create(self, text, repo, user, commit_id=None, pull_request=None, |
|
167 | 174 | f_path=None, line_no=None, status_change=None, |
|
168 | 175 | status_change_type=None, comment_type=None, |
|
169 | 176 | resolves_comment_id=None, closing_pr=False, send_email=True, |
|
170 | 177 | renderer=None): |
|
171 | 178 | """ |
|
172 | 179 | Creates new comment for commit or pull request. |
|
173 | 180 | If status_change is not None, this comment is associated with a 

174 | 181 | status change of a commit, or of a commit associated with a pull request 
|
175 | 182 | |
|
176 | 183 | :param text: |
|
177 | 184 | :param repo: |
|
178 | 185 | :param user: |
|
179 | 186 | :param commit_id: |
|
180 | 187 | :param pull_request: |
|
181 | 188 | :param f_path: |
|
182 | 189 | :param line_no: |
|
183 | 190 | :param status_change: Label for status change |
|
184 | 191 | :param comment_type: Type of comment |
|
185 | 192 | :param status_change_type: type of status change |
|
186 | 193 | :param closing_pr: |
|
187 | 194 | :param send_email: |
|
188 | 195 | :param renderer: pick renderer for this comment |
|
189 | 196 | """ |
|
190 | 197 | if not text: |
|
191 | 198 | log.warning('Missing text for comment, skipping...') |
|
192 | 199 | return |
|
193 | 200 | |
|
194 | 201 | if not renderer: |
|
195 | 202 | renderer = self._get_renderer() |
|
196 | 203 | |
|
197 | 204 | repo = self._get_repo(repo) |
|
198 | 205 | user = self._get_user(user) |
|
199 | 206 | |
|
200 | 207 | schema = comment_schema.CommentSchema() |
|
201 | 208 | validated_kwargs = schema.deserialize(dict( |
|
202 | 209 | comment_body=text, |
|
203 | 210 | comment_type=comment_type, |
|
204 | 211 | comment_file=f_path, |
|
205 | 212 | comment_line=line_no, |
|
206 | 213 | renderer_type=renderer, |
|
207 | 214 | status_change=status_change_type, |
|
208 | 215 | resolves_comment_id=resolves_comment_id, |
|
209 | 216 | repo=repo.repo_id, |
|
210 | 217 | user=user.user_id, |
|
211 | 218 | )) |
|
212 | 219 | |
|
213 | 220 | comment = ChangesetComment() |
|
214 | 221 | comment.renderer = validated_kwargs['renderer_type'] |
|
215 | 222 | comment.text = validated_kwargs['comment_body'] |
|
216 | 223 | comment.f_path = validated_kwargs['comment_file'] |
|
217 | 224 | comment.line_no = validated_kwargs['comment_line'] |
|
218 | 225 | comment.comment_type = validated_kwargs['comment_type'] |
|
219 | 226 | |
|
220 | 227 | comment.repo = repo |
|
221 | 228 | comment.author = user |
|
222 | 229 | comment.resolved_comment = self.__get_commit_comment( |
|
223 | 230 | validated_kwargs['resolves_comment_id']) |
|
224 | 231 | |
|
225 | 232 | pull_request_id = pull_request |
|
226 | 233 | |
|
227 | 234 | commit_obj = None |
|
228 | 235 | pull_request_obj = None |
|
229 | 236 | |
|
230 | 237 | if commit_id: |
|
231 | 238 | notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT |
|
232 | 239 | # do a lookup, so we don't pass something bad here |
|
233 | 240 | commit_obj = repo.scm_instance().get_commit(commit_id=commit_id) |
|
234 | 241 | comment.revision = commit_obj.raw_id |
|
235 | 242 | |
|
236 | 243 | elif pull_request_id: |
|
237 | 244 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT |
|
238 | 245 | pull_request_obj = self.__get_pull_request(pull_request_id) |
|
239 | 246 | comment.pull_request = pull_request_obj |
|
240 | 247 | else: |
|
241 | 248 | raise Exception('Please specify commit or pull_request_id') |
|
242 | 249 | |
|
243 | 250 | Session().add(comment) |
|
244 | 251 | Session().flush() |
|
245 | 252 | kwargs = { |
|
246 | 253 | 'user': user, |
|
247 | 254 | 'renderer_type': renderer, |
|
248 | 255 | 'repo_name': repo.repo_name, |
|
249 | 256 | 'status_change': status_change, |
|
250 | 257 | 'status_change_type': status_change_type, |
|
251 | 258 | 'comment_body': text, |
|
252 | 259 | 'comment_file': f_path, |
|
253 | 260 | 'comment_line': line_no, |
|
254 | 261 | 'comment_type': comment_type or 'note' |
|
255 | 262 | } |
|
256 | 263 | |
|
257 | 264 | if commit_obj: |
|
258 | 265 | recipients = ChangesetComment.get_users( |
|
259 | 266 | revision=commit_obj.raw_id) |
|
260 | 267 | # add commit author if it's in RhodeCode system |
|
261 | 268 | cs_author = User.get_from_cs_author(commit_obj.author) |
|
262 | 269 | if not cs_author: |
|
263 | 270 | # use repo owner if we cannot extract the author correctly |
|
264 | 271 | cs_author = repo.user |
|
265 | 272 | recipients += [cs_author] |
|
266 | 273 | |
|
267 | 274 | commit_comment_url = self.get_url(comment) |
|
268 | 275 | |
|
269 | 276 | target_repo_url = h.link_to( |
|
270 | 277 | repo.repo_name, |
|
271 | 278 | h.route_url('repo_summary', repo_name=repo.repo_name)) |
|
272 | 279 | |
|
273 | 280 | # commit specifics |
|
274 | 281 | kwargs.update({ |
|
275 | 282 | 'commit': commit_obj, |
|
276 | 283 | 'commit_message': commit_obj.message, |
|
277 | 284 | 'commit_target_repo': target_repo_url, |
|
278 | 285 | 'commit_comment_url': commit_comment_url, |
|
279 | 286 | }) |
|
280 | 287 | |
|
281 | 288 | elif pull_request_obj: |
|
282 | 289 | # get the current participants of this pull request |
|
283 | 290 | recipients = ChangesetComment.get_users( |
|
284 | 291 | pull_request_id=pull_request_obj.pull_request_id) |
|
285 | 292 | # add pull request author |
|
286 | 293 | recipients += [pull_request_obj.author] |
|
287 | 294 | |
|
288 | 295 | # add the reviewers to notification |
|
289 | 296 | recipients += [x.user for x in pull_request_obj.reviewers] |
|
290 | 297 | |
|
291 | 298 | pr_target_repo = pull_request_obj.target_repo |
|
292 | 299 | pr_source_repo = pull_request_obj.source_repo |
|
293 | 300 | |
|
294 | 301 | pr_comment_url = h.url( |
|
295 | 302 | 'pullrequest_show', |
|
296 | 303 | repo_name=pr_target_repo.repo_name, |
|
297 | 304 | pull_request_id=pull_request_obj.pull_request_id, |
|
298 | 305 | anchor='comment-%s' % comment.comment_id, |
|
299 | 306 | qualified=True,) |
|
300 | 307 | |
|
301 | 308 | # set some variables for email notification |
|
302 | 309 | pr_target_repo_url = h.route_url( |
|
303 | 310 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
304 | 311 | |
|
305 | 312 | pr_source_repo_url = h.route_url( |
|
306 | 313 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
307 | 314 | |
|
308 | 315 | # pull request specifics |
|
309 | 316 | kwargs.update({ |
|
310 | 317 | 'pull_request': pull_request_obj, |
|
311 | 318 | 'pr_id': pull_request_obj.pull_request_id, |
|
312 | 319 | 'pr_target_repo': pr_target_repo, |
|
313 | 320 | 'pr_target_repo_url': pr_target_repo_url, |
|
314 | 321 | 'pr_source_repo': pr_source_repo, |
|
315 | 322 | 'pr_source_repo_url': pr_source_repo_url, |
|
316 | 323 | 'pr_comment_url': pr_comment_url, |
|
317 | 324 | 'pr_closing': closing_pr, |
|
318 | 325 | }) |
|
319 | 326 | if send_email: |
|
320 | 327 | # pre-generate the subject for notification itself |
|
321 | 328 | (subject, |
|
322 | 329 | _h, _e, # we don't care about those |
|
323 | 330 | body_plaintext) = EmailNotificationModel().render_email( |
|
324 | 331 | notification_type, **kwargs) |
|
325 | 332 | |
|
326 | 333 | mention_recipients = set( |
|
327 | 334 | self._extract_mentions(text)).difference(recipients) |
|
328 | 335 | |
|
329 | 336 | # create notification objects, and emails |
|
330 | 337 | NotificationModel().create( |
|
331 | 338 | created_by=user, |
|
332 | 339 | notification_subject=subject, |
|
333 | 340 | notification_body=body_plaintext, |
|
334 | 341 | notification_type=notification_type, |
|
335 | 342 | recipients=recipients, |
|
336 | 343 | mention_recipients=mention_recipients, |
|
337 | 344 | email_kwargs=kwargs, |
|
338 | 345 | ) |
|
339 | 346 | |
|
340 | action = ( | |
|
341 | 'user_commented_pull_request:{}'.format( | 

342 | comment.pull_request.pull_request_id) | |
|
343 | if comment.pull_request | |
|
344 | else 'user_commented_revision:{}'.format(comment.revision) | |
|
345 | ) | |
|
346 | action_logger(user, action, comment.repo) | |
|
347 | Session().flush() | |
|
348 | if comment.pull_request: | |
|
349 | action = 'repo.pull_request.comment.create' | |
|
350 | else: | |
|
351 | action = 'repo.commit.comment.create' | |
|
352 | ||
|
353 | comment_data = comment.get_api_data() | |
|
354 | self._log_audit_action( | |
|
355 | action, {'data': comment_data}, user, comment) | |
|
347 | 356 | |
|
348 | 357 | registry = get_current_registry() |
|
349 | 358 | rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {}) |
|
350 | 359 | channelstream_config = rhodecode_plugins.get('channelstream', {}) |
|
351 | 360 | msg_url = '' |
|
352 | 361 | if commit_obj: |
|
353 | 362 | msg_url = commit_comment_url |
|
354 | 363 | repo_name = repo.repo_name |
|
355 | 364 | elif pull_request_obj: |
|
356 | 365 | msg_url = pr_comment_url |
|
357 | 366 | repo_name = pr_target_repo.repo_name |
|
358 | 367 | |
|
359 | 368 | if channelstream_config.get('enabled'): |
|
360 | 369 | message = '<strong>{}</strong> {} - ' \ |
|
361 | 370 | '<a onclick="window.location=\'{}\';' \ |
|
362 | 371 | 'window.location.reload()">' \ |
|
363 | 372 | '<strong>{}</strong></a>' |
|
364 | 373 | message = message.format( |
|
365 | 374 | user.username, _('made a comment'), msg_url, |
|
366 | 375 | _('Show it now')) |
|
367 | 376 | channel = '/repo${}$/pr/{}'.format( |
|
368 | 377 | repo_name, |
|
369 | 378 | pull_request_id |
|
370 | 379 | ) |
|
371 | 380 | payload = { |
|
372 | 381 | 'type': 'message', |
|
373 | 382 | 'timestamp': datetime.utcnow(), |
|
374 | 383 | 'user': 'system', |
|
375 | 384 | 'exclude_users': [user.username], |
|
376 | 385 | 'channel': channel, |
|
377 | 386 | 'message': { |
|
378 | 387 | 'message': message, |
|
379 | 388 | 'level': 'info', |
|
380 | 389 | 'topic': '/notifications' |
|
381 | 390 | } |
|
382 | 391 | } |
|
383 | 392 | channelstream_request(channelstream_config, [payload], |
|
384 | 393 | '/message', raise_exc=False) |
|
385 | 394 | |
|
386 | 395 | return comment |
|
387 | 396 | |
|
388 | def delete(self, comment): | |
|
397 | def delete(self, comment, user): | |
|
389 | 398 | """ |
|
390 | 399 | Deletes given comment |
|
391 | ||
|
392 | :param comment_id: | |
|
393 | 400 | """ |
|
394 | 401 | comment = self.__get_commit_comment(comment) |
|
402 | old_data = comment.get_api_data() | |
|
395 | 403 | Session().delete(comment) |
|
396 | 404 | |
|
405 | if comment.pull_request: | |
|
406 | action = 'repo.pull_request.comment.delete' | |
|
407 | else: | |
|
408 | action = 'repo.commit.comment.delete' | |
|
409 | ||
|
410 | self._log_audit_action( | |
|
411 | action, {'old_data': old_data}, user, comment) | |
|
412 | ||
|
397 | 413 | return comment |
|
398 | 414 | |
|
399 | 415 | def get_all_comments(self, repo_id, revision=None, pull_request=None): |
|
400 | 416 | q = ChangesetComment.query()\ |
|
401 | 417 | .filter(ChangesetComment.repo_id == repo_id) |
|
402 | 418 | if revision: |
|
403 | 419 | q = q.filter(ChangesetComment.revision == revision) |
|
404 | 420 | elif pull_request: |
|
405 | 421 | pull_request = self.__get_pull_request(pull_request) |
|
406 | 422 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
407 | 423 | else: |
|
408 | 424 | raise Exception('Please specify commit or pull_request') |
|
409 | 425 | q = q.order_by(ChangesetComment.created_on) |
|
410 | 426 | return q.all() |
|
411 | 427 | |
|
412 | 428 | def get_url(self, comment, request=None, permalink=False): |
|
413 | 429 | if not request: |
|
414 | 430 | request = get_current_request() |
|
415 | 431 | |
|
416 | 432 | comment = self.__get_commit_comment(comment) |
|
417 | 433 | if comment.pull_request: |
|
418 | 434 | pull_request = comment.pull_request |
|
419 | 435 | if permalink: |
|
420 | 436 | return request.route_url( |
|
421 | 437 | 'pull_requests_global', |
|
422 | 438 | pull_request_id=pull_request.pull_request_id, |
|
423 | 439 | _anchor='comment-%s' % comment.comment_id) |
|
424 | 440 | else: |
|
425 | 441 | return request.route_url( |
|
426 | 442 | 'pullrequest_show', |
|
427 | 443 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
428 | 444 | pull_request_id=pull_request.pull_request_id, |
|
429 | 445 | _anchor='comment-%s' % comment.comment_id) |
|
430 | 446 | |
|
431 | 447 | else: |
|
432 | 448 | repo = comment.repo |
|
433 | 449 | commit_id = comment.revision |
|
434 | 450 | |
|
435 | 451 | if permalink: |
|
436 | 452 | return request.route_url( |
|
437 | 453 | 'repo_commit', repo_name=safe_str(repo.repo_id), |
|
438 | 454 | commit_id=commit_id, |
|
439 | 455 | _anchor='comment-%s' % comment.comment_id) |
|
440 | 456 | |
|
441 | 457 | else: |
|
442 | 458 | return request.route_url( |
|
443 | 459 | 'repo_commit', repo_name=safe_str(repo.repo_name), |
|
444 | 460 | commit_id=commit_id, |
|
445 | 461 | _anchor='comment-%s' % comment.comment_id) |
|
446 | 462 | |
|
447 | 463 | def get_comments(self, repo_id, revision=None, pull_request=None): |
|
448 | 464 | """ |
|
449 | 465 | Gets main comments based on revision or pull_request_id |
|
450 | 466 | |
|
451 | 467 | :param repo_id: |
|
452 | 468 | :param revision: |
|
453 | 469 | :param pull_request: |
|
454 | 470 | """ |
|
455 | 471 | |
|
456 | 472 | q = ChangesetComment.query()\ |
|
457 | 473 | .filter(ChangesetComment.repo_id == repo_id)\ |
|
458 | 474 | .filter(ChangesetComment.line_no == None)\ |
|
459 | 475 | .filter(ChangesetComment.f_path == None) |
|
460 | 476 | if revision: |
|
461 | 477 | q = q.filter(ChangesetComment.revision == revision) |
|
462 | 478 | elif pull_request: |
|
463 | 479 | pull_request = self.__get_pull_request(pull_request) |
|
464 | 480 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
465 | 481 | else: |
|
466 | 482 | raise Exception('Please specify commit or pull_request') |
|
467 | 483 | q = q.order_by(ChangesetComment.created_on) |
|
468 | 484 | return q.all() |
|
469 | 485 | |
|
470 | 486 | def get_inline_comments(self, repo_id, revision=None, pull_request=None): |
|
471 | 487 | q = self._get_inline_comments_query(repo_id, revision, pull_request) |
|
472 | 488 | return self._group_comments_by_path_and_line_number(q) |
|
473 | 489 | |
|
474 | 490 | def get_inline_comments_count(self, inline_comments, skip_outdated=True, |
|
475 | 491 | version=None): |
|
476 | 492 | inline_cnt = 0 |
|
477 | 493 | for fname, per_line_comments in inline_comments.iteritems(): |
|
478 | 494 | for lno, comments in per_line_comments.iteritems(): |
|
479 | 495 | for comm in comments: |
|
480 | 496 | if not (skip_outdated and comm.outdated_at_version(version)): 
|
481 | 497 | inline_cnt += 1 |
|
482 | 498 | |
|
483 | 499 | return inline_cnt |
|
484 | 500 | |
|
485 | 501 | def get_outdated_comments(self, repo_id, pull_request): |
|
486 | 502 | # TODO: johbo: Remove `repo_id`, it is not needed to find the comments |
|
487 | 503 | # of a pull request. |
|
488 | 504 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
489 | 505 | q = q.filter( |
|
490 | 506 | ChangesetComment.display_state == |
|
491 | 507 | ChangesetComment.COMMENT_OUTDATED |
|
492 | 508 | ).order_by(ChangesetComment.comment_id.asc()) |
|
493 | 509 | |
|
494 | 510 | return self._group_comments_by_path_and_line_number(q) |
|
495 | 511 | |
|
496 | 512 | def _get_inline_comments_query(self, repo_id, revision, pull_request): |
|
497 | 513 | # TODO: johbo: Split this into two methods: One for PR and one for |
|
498 | 514 | # commit. |
|
499 | 515 | if revision: |
|
500 | 516 | q = Session().query(ChangesetComment).filter( |
|
501 | 517 | ChangesetComment.repo_id == repo_id, |
|
502 | 518 | ChangesetComment.line_no != null(), |
|
503 | 519 | ChangesetComment.f_path != null(), |
|
504 | 520 | ChangesetComment.revision == revision) |
|
505 | 521 | |
|
506 | 522 | elif pull_request: |
|
507 | 523 | pull_request = self.__get_pull_request(pull_request) |
|
508 | 524 | if not CommentsModel.use_outdated_comments(pull_request): |
|
509 | 525 | q = self._visible_inline_comments_of_pull_request(pull_request) |
|
510 | 526 | else: |
|
511 | 527 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
512 | 528 | |
|
513 | 529 | else: |
|
514 | 530 | raise Exception('Please specify commit or pull_request_id') |
|
515 | 531 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
516 | 532 | return q |
|
517 | 533 | |
|
518 | 534 | def _group_comments_by_path_and_line_number(self, q): |
|
519 | 535 | comments = q.all() |
|
520 | 536 | paths = collections.defaultdict(lambda: collections.defaultdict(list)) |
|
521 | 537 | for co in comments: |
|
522 | 538 | paths[co.f_path][co.line_no].append(co) |
|
523 | 539 | return paths |
|
524 | 540 | |
|
525 | 541 | @classmethod |
|
526 | 542 | def needed_extra_diff_context(cls): |
|
527 | 543 | return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER) |
|
528 | 544 | |
|
529 | 545 | def outdate_comments(self, pull_request, old_diff_data, new_diff_data): |
|
530 | 546 | if not CommentsModel.use_outdated_comments(pull_request): |
|
531 | 547 | return |
|
532 | 548 | |
|
533 | 549 | comments = self._visible_inline_comments_of_pull_request(pull_request) |
|
534 | 550 | comments_to_outdate = comments.all() |
|
535 | 551 | |
|
536 | 552 | for comment in comments_to_outdate: |
|
537 | 553 | self._outdate_one_comment(comment, old_diff_data, new_diff_data) |
|
538 | 554 | |
|
539 | 555 | def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc): |
|
540 | 556 | diff_line = _parse_comment_line_number(comment.line_no) |
|
541 | 557 | |
|
542 | 558 | try: |
|
543 | 559 | old_context = old_diff_proc.get_context_of_line( |
|
544 | 560 | path=comment.f_path, diff_line=diff_line) |
|
545 | 561 | new_context = new_diff_proc.get_context_of_line( |
|
546 | 562 | path=comment.f_path, diff_line=diff_line) |
|
547 | 563 | except (diffs.LineNotInDiffException, |
|
548 | 564 | diffs.FileNotInDiffException): |
|
549 | 565 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
550 | 566 | return |
|
551 | 567 | |
|
552 | 568 | if old_context == new_context: |
|
553 | 569 | return |
|
554 | 570 | |
|
555 | 571 | if self._should_relocate_diff_line(diff_line): |
|
556 | 572 | new_diff_lines = new_diff_proc.find_context( |
|
557 | 573 | path=comment.f_path, context=old_context, |
|
558 | 574 | offset=self.DIFF_CONTEXT_BEFORE) |
|
559 | 575 | if not new_diff_lines: |
|
560 | 576 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
561 | 577 | else: |
|
562 | 578 | new_diff_line = self._choose_closest_diff_line( |
|
563 | 579 | diff_line, new_diff_lines) |
|
564 | 580 | comment.line_no = _diff_to_comment_line_number(new_diff_line) |
|
565 | 581 | else: |
|
566 | 582 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
567 | 583 | |
|
568 | 584 | def _should_relocate_diff_line(self, diff_line): |
|
569 | 585 | """ |
|
570 | 586 | Checks if relocation shall be tried for the given `diff_line`. |
|
571 | 587 | |
|
572 | 588 | If a comment points into the first few lines, then after an update 

573 | 589 | another line may have been added on top. In that case we would still 

574 | 590 | find the old context and move the comment around, which would be 

575 | 591 | wrong. 
|
576 | 592 | """ |
|
577 | 593 | should_relocate = ( |
|
578 | 594 | (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or |
|
579 | 595 | (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE)) |
|
580 | 596 | return should_relocate |
|
581 | 597 | |
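A quick check of the guard above: DIFF_CONTEXT_BEFORE is 3, so comments pinned to the first three lines are never relocated. DiffLineNumber is the old/new pair used throughout this module; instantiating the model directly is for illustration only:

    from rhodecode.lib import diffs

    model = CommentsModel()
    model._should_relocate_diff_line(diffs.DiffLineNumber(old=None, new=2))   # falsy, no relocation
    model._should_relocate_diff_line(diffs.DiffLineNumber(old=None, new=10))  # True, relocation attempted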
|
582 | 598 | def _choose_closest_diff_line(self, diff_line, new_diff_lines): |
|
583 | 599 | candidate = new_diff_lines[0] |
|
584 | 600 | best_delta = _diff_line_delta(diff_line, candidate) |
|
585 | 601 | for new_diff_line in new_diff_lines[1:]: |
|
586 | 602 | delta = _diff_line_delta(diff_line, new_diff_line) |
|
587 | 603 | if delta < best_delta: |
|
588 | 604 | candidate = new_diff_line |
|
589 | 605 | best_delta = delta |
|
590 | 606 | return candidate |
|
591 | 607 | |
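Closest-candidate selection in isolation, relying on the module-level _diff_line_delta defined at the bottom of this file; the deltas here are abs(12 - 8) = 4 versus abs(12 - 13) = 1, so the second candidate wins:

    from rhodecode.lib import diffs

    target = diffs.DiffLineNumber(old=None, new=12)
    candidates = [diffs.DiffLineNumber(old=None, new=8),
                  diffs.DiffLineNumber(old=None, new=13)]
    CommentsModel()._choose_closest_diff_line(target, candidates)  # -> the new=13 entry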
|
592 | 608 | def _visible_inline_comments_of_pull_request(self, pull_request): |
|
593 | 609 | comments = self._all_inline_comments_of_pull_request(pull_request) |
|
594 | 610 | comments = comments.filter( |
|
595 | 611 | coalesce(ChangesetComment.display_state, '') != |
|
596 | 612 | ChangesetComment.COMMENT_OUTDATED) |
|
597 | 613 | return comments |
|
598 | 614 | |
|
599 | 615 | def _all_inline_comments_of_pull_request(self, pull_request): |
|
600 | 616 | comments = Session().query(ChangesetComment)\ |
|
601 | 617 | .filter(ChangesetComment.line_no != None)\ |
|
602 | 618 | .filter(ChangesetComment.f_path != None)\ |
|
603 | 619 | .filter(ChangesetComment.pull_request == pull_request) |
|
604 | 620 | return comments |
|
605 | 621 | |
|
606 | 622 | def _all_general_comments_of_pull_request(self, pull_request): |
|
607 | 623 | comments = Session().query(ChangesetComment)\ |
|
608 | 624 | .filter(ChangesetComment.line_no == None)\ |
|
609 | 625 | .filter(ChangesetComment.f_path == None)\ |
|
610 | 626 | .filter(ChangesetComment.pull_request == pull_request) |
|
611 | 627 | return comments |
|
612 | 628 | |
|
613 | 629 | @staticmethod |
|
614 | 630 | def use_outdated_comments(pull_request): |
|
615 | 631 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
616 | 632 | settings = settings_model.get_general_settings() |
|
617 | 633 | return settings.get('rhodecode_use_outdated_comments', False) |
|
618 | 634 | |
|
619 | 635 | |
|
620 | 636 | def _parse_comment_line_number(line_no): |
|
621 | 637 | """ |
|
622 | 638 | Parses line numbers of the form "(o|n)\d+" and returns them in a tuple. |
|
623 | 639 | """ |
|
624 | 640 | old_line = None |
|
625 | 641 | new_line = None |
|
626 | 642 | if line_no.startswith('o'): |
|
627 | 643 | old_line = int(line_no[1:]) |
|
628 | 644 | elif line_no.startswith('n'): |
|
629 | 645 | new_line = int(line_no[1:]) |
|
630 | 646 | else: |
|
631 | 647 | raise ValueError("Comment lines have to start with either 'o' or 'n'.") |
|
632 | 648 | return diffs.DiffLineNumber(old_line, new_line) |
|
633 | 649 | |
|
634 | 650 | |
|
635 | 651 | def _diff_to_comment_line_number(diff_line): |
|
636 | 652 | if diff_line.new is not None: |
|
637 | 653 | return u'n{}'.format(diff_line.new) |
|
638 | 654 | elif diff_line.old is not None: |
|
639 | 655 | return u'o{}'.format(diff_line.old) |
|
640 | 656 | return u'' |
|
641 | 657 | |
|
642 | 658 | |
|
643 | 659 | def _diff_line_delta(a, b): |
|
644 | 660 | if None not in (a.new, b.new): |
|
645 | 661 | return abs(a.new - b.new) |
|
646 | 662 | elif None not in (a.old, b.old): |
|
647 | 663 | return abs(a.old - b.old) |
|
648 | 664 | else: |
|
649 | 665 | raise ValueError( |
|
650 | 666 | "Cannot compute delta between {} and {}".format(a, b)) |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated.
@@ -1,1528 +1,1554 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | pull request model for RhodeCode |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | from collections import namedtuple |
|
27 | 27 | import json |
|
28 | 28 | import logging |
|
29 | 29 | import datetime |
|
30 | 30 | import urllib |
|
31 | 31 | |
|
32 | 32 | from pylons.i18n.translation import _ |
|
33 | 33 | from pylons.i18n.translation import lazy_ugettext |
|
34 | 34 | from pyramid.threadlocal import get_current_request |
|
35 | 35 | from sqlalchemy import or_ |
|
36 | 36 | |
|
37 | 37 | from rhodecode import events |
|
38 | 38 | from rhodecode.lib import helpers as h, hooks_utils, diffs |
|
39 | from rhodecode.lib import audit_logger | |
|
39 | 40 | from rhodecode.lib.compat import OrderedDict |
|
40 | 41 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon |
|
41 | 42 | from rhodecode.lib.markup_renderer import ( |
|
42 | 43 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) |
|
43 | from rhodecode.lib.utils import action_logger | |
|
44 | 44 | from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe |
|
45 | 45 | from rhodecode.lib.vcs.backends.base import ( |
|
46 | 46 | Reference, MergeResponse, MergeFailureReason, UpdateFailureReason) |
|
47 | 47 | from rhodecode.lib.vcs.conf import settings as vcs_settings |
|
48 | 48 | from rhodecode.lib.vcs.exceptions import ( |
|
49 | 49 | CommitDoesNotExistError, EmptyRepositoryError) |
|
50 | 50 | from rhodecode.model import BaseModel |
|
51 | 51 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
52 | 52 | from rhodecode.model.comment import CommentsModel |
|
53 | 53 | from rhodecode.model.db import ( |
|
54 | 54 | PullRequest, PullRequestReviewers, ChangesetStatus, |
|
55 | 55 | PullRequestVersion, ChangesetComment, Repository) |
|
56 | 56 | from rhodecode.model.meta import Session |
|
57 | 57 | from rhodecode.model.notification import NotificationModel, \ |
|
58 | 58 | EmailNotificationModel |
|
59 | 59 | from rhodecode.model.scm import ScmModel |
|
60 | 60 | from rhodecode.model.settings import VcsSettingsModel |
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | log = logging.getLogger(__name__) |
|
64 | 64 | |
|
65 | 65 | |
|
66 | 66 | # Data structure to hold the response data when updating commits during a pull |
|
67 | 67 | # request update. |
|
68 | 68 | UpdateResponse = namedtuple('UpdateResponse', [ |
|
69 | 69 | 'executed', 'reason', 'new', 'old', 'changes', |
|
70 | 70 | 'source_changed', 'target_changed']) |
|
71 | 71 | |
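UpdateResponse is a plain namedtuple; a sketch of the no-op response that update_commits (below) returns when neither side moved, assuming a pull_request object in scope:

    response = UpdateResponse(
        executed=False, reason=UpdateFailureReason.NO_CHANGE,
        new=None, old=pull_request, changes=None,
        source_changed=False, target_changed=False)
    if not response.executed:
        log.debug('update skipped: %s', response.reason)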
|
72 | 72 | |
|
73 | 73 | class PullRequestModel(BaseModel): |
|
74 | 74 | |
|
75 | 75 | cls = PullRequest |
|
76 | 76 | |
|
77 | 77 | DIFF_CONTEXT = 3 |
|
78 | 78 | |
|
79 | 79 | MERGE_STATUS_MESSAGES = { |
|
80 | 80 | MergeFailureReason.NONE: lazy_ugettext( |
|
81 | 81 | 'This pull request can be automatically merged.'), |
|
82 | 82 | MergeFailureReason.UNKNOWN: lazy_ugettext( |
|
83 | 83 | 'This pull request cannot be merged because of an unhandled' |
|
84 | 84 | ' exception.'), |
|
85 | 85 | MergeFailureReason.MERGE_FAILED: lazy_ugettext( |
|
86 | 86 | 'This pull request cannot be merged because of merge conflicts.'), |
|
87 | 87 | MergeFailureReason.PUSH_FAILED: lazy_ugettext( |
|
88 | 88 | 'This pull request could not be merged because push to target' |
|
89 | 89 | ' failed.'), |
|
90 | 90 | MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext( |
|
91 | 91 | 'This pull request cannot be merged because the target is not a' |
|
92 | 92 | ' head.'), |
|
93 | 93 | MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext( |
|
94 | 94 | 'This pull request cannot be merged because the source contains' |
|
95 | 95 | ' more branches than the target.'), |
|
96 | 96 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext( |
|
97 | 97 | 'This pull request cannot be merged because the target has' |
|
98 | 98 | ' multiple heads.'), |
|
99 | 99 | MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext( |
|
100 | 100 | 'This pull request cannot be merged because the target repository' |
|
101 | 101 | ' is locked.'), |
|
102 | 102 | MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext( |
|
103 | 103 | 'This pull request cannot be merged because the target or the ' |
|
104 | 104 | 'source reference is missing.'), |
|
105 | 105 | MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
106 | 106 | 'This pull request cannot be merged because the target ' |
|
107 | 107 | 'reference is missing.'), |
|
108 | 108 | MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
109 | 109 | 'This pull request cannot be merged because the source ' |
|
110 | 110 | 'reference is missing.'), |
|
111 | 111 | MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext( |
|
112 | 112 | 'This pull request cannot be merged because of conflicts related ' |
|
113 | 113 | 'to sub repositories.'), |
|
114 | 114 | } |
|
115 | 115 | |
|
116 | 116 | UPDATE_STATUS_MESSAGES = { |
|
117 | 117 | UpdateFailureReason.NONE: lazy_ugettext( |
|
118 | 118 | 'Pull request update successful.'), |
|
119 | 119 | UpdateFailureReason.UNKNOWN: lazy_ugettext( |
|
120 | 120 | 'Pull request update failed because of an unknown error.'), |
|
121 | 121 | UpdateFailureReason.NO_CHANGE: lazy_ugettext( |
|
122 | 122 | 'No update needed because the source and target have not changed.'), |
|
123 | 123 | UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext( |
|
124 | 124 | 'Pull request cannot be updated because the reference type is ' |
|
125 | 125 | 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'), |
|
126 | 126 | UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
127 | 127 | 'This pull request cannot be updated because the target ' |
|
128 | 128 | 'reference is missing.'), |
|
129 | 129 | UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
130 | 130 | 'This pull request cannot be updated because the source ' |
|
131 | 131 | 'reference is missing.'), |
|
132 | 132 | } |
|
133 | 133 | |
|
134 | 134 | def __get_pull_request(self, pull_request): |
|
135 | 135 | return self._get_instance(( |
|
136 | 136 | PullRequest, PullRequestVersion), pull_request) |
|
137 | 137 | |
|
138 | 138 | def _check_perms(self, perms, pull_request, user, api=False): |
|
139 | 139 | if not api: |
|
140 | 140 | return h.HasRepoPermissionAny(*perms)( |
|
141 | 141 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
142 | 142 | else: |
|
143 | 143 | return h.HasRepoPermissionAnyApi(*perms)( |
|
144 | 144 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
145 | 145 | |
|
146 | 146 | def check_user_read(self, pull_request, user, api=False): |
|
147 | 147 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
148 | 148 | return self._check_perms(_perms, pull_request, user, api) |
|
149 | 149 | |
|
150 | 150 | def check_user_merge(self, pull_request, user, api=False): |
|
151 | 151 | _perms = ('repository.admin', 'repository.write', 'hg.admin',) |
|
152 | 152 | return self._check_perms(_perms, pull_request, user, api) |
|
153 | 153 | |
|
154 | 154 | def check_user_update(self, pull_request, user, api=False): |
|
155 | 155 | owner = user.user_id == pull_request.user_id |
|
156 | 156 | return self.check_user_merge(pull_request, user, api) or owner |
|
157 | 157 | |
|
158 | 158 | def check_user_delete(self, pull_request, user): |
|
159 | 159 | owner = user.user_id == pull_request.user_id |
|
160 | 160 | _perms = ('repository.admin',) |
|
161 | 161 | return self._check_perms(_perms, pull_request, user) or owner |
|
162 | 162 | |
|
163 | 163 | def check_user_change_status(self, pull_request, user, api=False): |
|
164 | 164 | reviewer = user.user_id in [x.user_id for x in |
|
165 | 165 | pull_request.reviewers] |
|
166 | 166 | return self.check_user_update(pull_request, user, api) or reviewer |
|
167 | 167 | |
|
168 | 168 | def get(self, pull_request): |
|
169 | 169 | return self.__get_pull_request(pull_request) |
|
170 | 170 | |
|
171 | 171 | def _prepare_get_all_query(self, repo_name, source=False, statuses=None, |
|
172 | 172 | opened_by=None, order_by=None, |
|
173 | 173 | order_dir='desc'): |
|
174 | 174 | repo = None |
|
175 | 175 | if repo_name: |
|
176 | 176 | repo = self._get_repo(repo_name) |
|
177 | 177 | |
|
178 | 178 | q = PullRequest.query() |
|
179 | 179 | |
|
180 | 180 | # source or target |
|
181 | 181 | if repo and source: |
|
182 | 182 | q = q.filter(PullRequest.source_repo == repo) |
|
183 | 183 | elif repo: |
|
184 | 184 | q = q.filter(PullRequest.target_repo == repo) |
|
185 | 185 | |
|
186 | 186 | # closed,opened |
|
187 | 187 | if statuses: |
|
188 | 188 | q = q.filter(PullRequest.status.in_(statuses)) |
|
189 | 189 | |
|
190 | 190 | # opened by filter |
|
191 | 191 | if opened_by: |
|
192 | 192 | q = q.filter(PullRequest.user_id.in_(opened_by)) |
|
193 | 193 | |
|
194 | 194 | if order_by: |
|
195 | 195 | order_map = { |
|
196 | 196 | 'name_raw': PullRequest.pull_request_id, |
|
197 | 197 | 'title': PullRequest.title, |
|
198 | 198 | 'updated_on_raw': PullRequest.updated_on, |
|
199 | 199 | 'target_repo': PullRequest.target_repo_id |
|
200 | 200 | } |
|
201 | 201 | if order_dir == 'asc': |
|
202 | 202 | q = q.order_by(order_map[order_by].asc()) |
|
203 | 203 | else: |
|
204 | 204 | q = q.order_by(order_map[order_by].desc()) |
|
205 | 205 | |
|
206 | 206 | return q |
|
207 | 207 | |
|
208 | 208 | def count_all(self, repo_name, source=False, statuses=None, |
|
209 | 209 | opened_by=None): |
|
210 | 210 | """ |
|
211 | 211 | Count the number of pull requests for a specific repository. |
|
212 | 212 | |
|
213 | 213 | :param repo_name: target or source repo |
|
214 | 214 | :param source: boolean flag to specify if repo_name refers to source |
|
215 | 215 | :param statuses: list of pull request statuses |
|
216 | 216 | :param opened_by: author user of the pull request |
|
217 | 217 | :returns: int number of pull requests |
|
218 | 218 | """ |
|
219 | 219 | q = self._prepare_get_all_query( |
|
220 | 220 | repo_name, source=source, statuses=statuses, opened_by=opened_by) |
|
221 | 221 | |
|
222 | 222 | return q.count() |
|
223 | 223 | |
|
224 | 224 | def get_all(self, repo_name, source=False, statuses=None, opened_by=None, |
|
225 | 225 | offset=0, length=None, order_by=None, order_dir='desc'): |
|
226 | 226 | """ |
|
227 | 227 | Get all pull requests for a specific repository. |
|
228 | 228 | |
|
229 | 229 | :param repo_name: target or source repo |
|
230 | 230 | :param source: boolean flag to specify if repo_name refers to source |
|
231 | 231 | :param statuses: list of pull request statuses |
|
232 | 232 | :param opened_by: author user of the pull request |
|
233 | 233 | :param offset: pagination offset |
|
234 | 234 | :param length: length of returned list |
|
235 | 235 | :param order_by: order of the returned list |
|
236 | 236 | :param order_dir: 'asc' or 'desc' ordering direction |
|
237 | 237 | :returns: list of pull requests |
|
238 | 238 | """ |
|
239 | 239 | q = self._prepare_get_all_query( |
|
240 | 240 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
241 | 241 | order_by=order_by, order_dir=order_dir) |
|
242 | 242 | |
|
243 | 243 | if length: |
|
244 | 244 | pull_requests = q.limit(length).offset(offset).all() |
|
245 | 245 | else: |
|
246 | 246 | pull_requests = q.all() |
|
247 | 247 | |
|
248 | 248 | return pull_requests |
|
249 | 249 | |
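A hedged usage sketch for the pair of helpers above; the repository name is illustrative, and the status constants are assumed to live on rhodecode.model.db.PullRequest:

    model = PullRequestModel()
    open_prs = model.get_all(
        'some-repo', statuses=[PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN],
        order_by='updated_on_raw', order_dir='desc', offset=0, length=20)
    total = model.count_all('some-repo', statuses=[PullRequest.STATUS_NEW])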
|
250 | 250 | def count_awaiting_review(self, repo_name, source=False, statuses=None, |
|
251 | 251 | opened_by=None): |
|
252 | 252 | """ |
|
253 | 253 | Count the number of pull requests for a specific repository that are |
|
254 | 254 | awaiting review. |
|
255 | 255 | |
|
256 | 256 | :param repo_name: target or source repo |
|
257 | 257 | :param source: boolean flag to specify if repo_name refers to source |
|
258 | 258 | :param statuses: list of pull request statuses |
|
259 | 259 | :param opened_by: author user of the pull request |
|
260 | 260 | :returns: int number of pull requests |
|
261 | 261 | """ |
|
262 | 262 | pull_requests = self.get_awaiting_review( |
|
263 | 263 | repo_name, source=source, statuses=statuses, opened_by=opened_by) |
|
264 | 264 | |
|
265 | 265 | return len(pull_requests) |
|
266 | 266 | |
|
267 | 267 | def get_awaiting_review(self, repo_name, source=False, statuses=None, |
|
268 | 268 | opened_by=None, offset=0, length=None, |
|
269 | 269 | order_by=None, order_dir='desc'): |
|
270 | 270 | """ |
|
271 | 271 | Get all pull requests for a specific repository that are awaiting |
|
272 | 272 | review. |
|
273 | 273 | |
|
274 | 274 | :param repo_name: target or source repo |
|
275 | 275 | :param source: boolean flag to specify if repo_name refers to source |
|
276 | 276 | :param statuses: list of pull request statuses |
|
277 | 277 | :param opened_by: author user of the pull request |
|
278 | 278 | :param offset: pagination offset |
|
279 | 279 | :param length: length of returned list |
|
280 | 280 | :param order_by: order of the returned list |
|
281 | 281 | :param order_dir: 'asc' or 'desc' ordering direction |
|
282 | 282 | :returns: list of pull requests |
|
283 | 283 | """ |
|
284 | 284 | pull_requests = self.get_all( |
|
285 | 285 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
286 | 286 | order_by=order_by, order_dir=order_dir) |
|
287 | 287 | |
|
288 | 288 | _filtered_pull_requests = [] |
|
289 | 289 | for pr in pull_requests: |
|
290 | 290 | status = pr.calculated_review_status() |
|
291 | 291 | if status in [ChangesetStatus.STATUS_NOT_REVIEWED, |
|
292 | 292 | ChangesetStatus.STATUS_UNDER_REVIEW]: |
|
293 | 293 | _filtered_pull_requests.append(pr) |
|
294 | 294 | if length: |
|
295 | 295 | return _filtered_pull_requests[offset:offset+length] |
|
296 | 296 | else: |
|
297 | 297 | return _filtered_pull_requests |
|
298 | 298 | |
|
299 | 299 | def count_awaiting_my_review(self, repo_name, source=False, statuses=None, |
|
300 | 300 | opened_by=None, user_id=None): |
|
301 | 301 | """ |
|
302 | 302 | Count the number of pull requests for a specific repository that are |
|
303 | 303 | awaiting review from a specific user. |
|
304 | 304 | |
|
305 | 305 | :param repo_name: target or source repo |
|
306 | 306 | :param source: boolean flag to specify if repo_name refers to source |
|
307 | 307 | :param statuses: list of pull request statuses |
|
308 | 308 | :param opened_by: author user of the pull request |
|
309 | 309 | :param user_id: reviewer user of the pull request |
|
310 | 310 | :returns: int number of pull requests |
|
311 | 311 | """ |
|
312 | 312 | pull_requests = self.get_awaiting_my_review( |
|
313 | 313 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
314 | 314 | user_id=user_id) |
|
315 | 315 | |
|
316 | 316 | return len(pull_requests) |
|
317 | 317 | |
|
318 | 318 | def get_awaiting_my_review(self, repo_name, source=False, statuses=None, |
|
319 | 319 | opened_by=None, user_id=None, offset=0, |
|
320 | 320 | length=None, order_by=None, order_dir='desc'): |
|
321 | 321 | """ |
|
322 | 322 | Get all pull requests for a specific repository that are awaiting |
|
323 | 323 | review from a specific user. |
|
324 | 324 | |
|
325 | 325 | :param repo_name: target or source repo |
|
326 | 326 | :param source: boolean flag to specify if repo_name refers to source |
|
327 | 327 | :param statuses: list of pull request statuses |
|
328 | 328 | :param opened_by: author user of the pull request |
|
329 | 329 | :param user_id: reviewer user of the pull request |
|
330 | 330 | :param offset: pagination offset |
|
331 | 331 | :param length: length of returned list |
|
332 | 332 | :param order_by: order of the returned list |
|
333 | 333 | :param order_dir: 'asc' or 'desc' ordering direction |
|
334 | 334 | :returns: list of pull requests |
|
335 | 335 | """ |
|
336 | 336 | pull_requests = self.get_all( |
|
337 | 337 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
338 | 338 | order_by=order_by, order_dir=order_dir) |
|
339 | 339 | |
|
340 | 340 | _my = PullRequestModel().get_not_reviewed(user_id) |
|
341 | 341 | my_participation = [] |
|
342 | 342 | for pr in pull_requests: |
|
343 | 343 | if pr in _my: |
|
344 | 344 | my_participation.append(pr) |
|
345 | 345 | _filtered_pull_requests = my_participation |
|
346 | 346 | if length: |
|
347 | 347 | return _filtered_pull_requests[offset:offset+length] |
|
348 | 348 | else: |
|
349 | 349 | return _filtered_pull_requests |
|
350 | 350 | |
|
351 | 351 | def get_not_reviewed(self, user_id): |
|
352 | 352 | return [ |
|
353 | 353 | x.pull_request for x in PullRequestReviewers.query().filter( |
|
354 | 354 | PullRequestReviewers.user_id == user_id).all() |
|
355 | 355 | ] |
|
356 | 356 | |
|
357 | 357 | def _prepare_participating_query(self, user_id=None, statuses=None, |
|
358 | 358 | order_by=None, order_dir='desc'): |
|
359 | 359 | q = PullRequest.query() |
|
360 | 360 | if user_id: |
|
361 | 361 | reviewers_subquery = Session().query( |
|
362 | 362 | PullRequestReviewers.pull_request_id).filter( |
|
363 | 363 | PullRequestReviewers.user_id == user_id).subquery() |
|
364 | 364 | user_filter = or_( 
|
365 | 365 | PullRequest.user_id == user_id, |
|
366 | 366 | PullRequest.pull_request_id.in_(reviewers_subquery) |
|
367 | 367 | ) |
|
368 | 368 | q = PullRequest.query().filter(user_filter) |
|
369 | 369 | |
|
370 | 370 | # closed,opened |
|
371 | 371 | if statuses: |
|
372 | 372 | q = q.filter(PullRequest.status.in_(statuses)) |
|
373 | 373 | |
|
374 | 374 | if order_by: |
|
375 | 375 | order_map = { |
|
376 | 376 | 'name_raw': PullRequest.pull_request_id, |
|
377 | 377 | 'title': PullRequest.title, |
|
378 | 378 | 'updated_on_raw': PullRequest.updated_on, |
|
379 | 379 | 'target_repo': PullRequest.target_repo_id |
|
380 | 380 | } |
|
381 | 381 | if order_dir == 'asc': |
|
382 | 382 | q = q.order_by(order_map[order_by].asc()) |
|
383 | 383 | else: |
|
384 | 384 | q = q.order_by(order_map[order_by].desc()) |
|
385 | 385 | |
|
386 | 386 | return q |
|
387 | 387 | |
|
388 | 388 | def count_im_participating_in(self, user_id=None, statuses=None): |
|
389 | 389 | q = self._prepare_participating_query(user_id, statuses=statuses) |
|
390 | 390 | return q.count() |
|
391 | 391 | |
|
392 | 392 | def get_im_participating_in( |
|
393 | 393 | self, user_id=None, statuses=None, offset=0, |
|
394 | 394 | length=None, order_by=None, order_dir='desc'): |
|
395 | 395 | """ |
|
396 | 396 | Get all pull requests that I'm participating in, or have opened 
|
397 | 397 | """ |
|
398 | 398 | |
|
399 | 399 | q = self._prepare_participating_query( |
|
400 | 400 | user_id, statuses=statuses, order_by=order_by, |
|
401 | 401 | order_dir=order_dir) |
|
402 | 402 | |
|
403 | 403 | if length: |
|
404 | 404 | pull_requests = q.limit(length).offset(offset).all() |
|
405 | 405 | else: |
|
406 | 406 | pull_requests = q.all() |
|
407 | 407 | |
|
408 | 408 | return pull_requests |
|
409 | 409 | |
|
410 | 410 | def get_versions(self, pull_request): |
|
411 | 411 | """ |
|
412 | 412 | returns versions of a pull request, sorted by ID ascending 
|
413 | 413 | """ |
|
414 | 414 | return PullRequestVersion.query()\ |
|
415 | 415 | .filter(PullRequestVersion.pull_request == pull_request)\ |
|
416 | 416 | .order_by(PullRequestVersion.pull_request_version_id.asc())\ |
|
417 | 417 | .all() |
|
418 | 418 | |
|
419 | 419 | def create(self, created_by, source_repo, source_ref, target_repo, |
|
420 | 420 | target_ref, revisions, reviewers, title, description=None, |
|
421 | 421 | reviewer_data=None): |
|
422 | 422 | |
|
423 | 423 | created_by_user = self._get_user(created_by) |
|
424 | 424 | source_repo = self._get_repo(source_repo) |
|
425 | 425 | target_repo = self._get_repo(target_repo) |
|
426 | 426 | |
|
427 | 427 | pull_request = PullRequest() |
|
428 | 428 | pull_request.source_repo = source_repo |
|
429 | 429 | pull_request.source_ref = source_ref |
|
430 | 430 | pull_request.target_repo = target_repo |
|
431 | 431 | pull_request.target_ref = target_ref |
|
432 | 432 | pull_request.revisions = revisions |
|
433 | 433 | pull_request.title = title |
|
434 | 434 | pull_request.description = description |
|
435 | 435 | pull_request.author = created_by_user |
|
436 | 436 | pull_request.reviewer_data = reviewer_data |
|
437 | 437 | |
|
438 | 438 | Session().add(pull_request) |
|
439 | 439 | Session().flush() |
|
440 | 440 | |
|
441 | 441 | reviewer_ids = set() |
|
442 | 442 | # members / reviewers |
|
443 | 443 | for reviewer_object in reviewers: |
|
444 | 444 | user_id, reasons, mandatory = reviewer_object |
|
445 | 445 | user = self._get_user(user_id) |
|
446 | 446 | |
|
447 | 447 | # skip duplicates |
|
448 | 448 | if user.user_id in reviewer_ids: |
|
449 | 449 | continue |
|
450 | 450 | |
|
451 | 451 | reviewer_ids.add(user.user_id) |
|
452 | 452 | |
|
453 | 453 | reviewer = PullRequestReviewers() |
|
454 | 454 | reviewer.user = user |
|
455 | 455 | reviewer.pull_request = pull_request |
|
456 | 456 | reviewer.reasons = reasons |
|
457 | 457 | reviewer.mandatory = mandatory |
|
458 | 458 | Session().add(reviewer) |
|
459 | 459 | |
|
460 | 460 | # Set approval status to "Under Review" for all commits which are |
|
461 | 461 | # part of this pull request. |
|
462 | 462 | ChangesetStatusModel().set_status( |
|
463 | 463 | repo=target_repo, |
|
464 | 464 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
465 | 465 | user=created_by_user, |
|
466 | 466 | pull_request=pull_request |
|
467 | 467 | ) |
|
468 | 468 | |
|
469 | 469 | self.notify_reviewers(pull_request, reviewer_ids) |
|
470 | 470 | self._trigger_pull_request_hook( |
|
471 | 471 | pull_request, created_by_user, 'create') |
|
472 | 472 | |
|
473 | creation_data = pull_request.get_api_data(with_merge_state=False) | |
|
474 | self._log_audit_action( | |
|
475 | 'repo.pull_request.create', {'data': creation_data}, | |
|
476 | created_by_user, pull_request) | |
|
477 | ||
|
473 | 478 | return pull_request |
|
474 | 479 | |
|
475 | 480 | def _trigger_pull_request_hook(self, pull_request, user, action): |
|
476 | 481 | pull_request = self.__get_pull_request(pull_request) |
|
477 | 482 | target_scm = pull_request.target_repo.scm_instance() |
|
478 | 483 | if action == 'create': |
|
479 | 484 | trigger_hook = hooks_utils.trigger_log_create_pull_request_hook |
|
480 | 485 | elif action == 'merge': |
|
481 | 486 | trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook |
|
482 | 487 | elif action == 'close': |
|
483 | 488 | trigger_hook = hooks_utils.trigger_log_close_pull_request_hook |
|
484 | 489 | elif action == 'review_status_change': |
|
485 | 490 | trigger_hook = hooks_utils.trigger_log_review_pull_request_hook |
|
486 | 491 | elif action == 'update': |
|
487 | 492 | trigger_hook = hooks_utils.trigger_log_update_pull_request_hook |
|
488 | 493 | else: |
|
489 | 494 | return |
|
490 | 495 | |
|
491 | 496 | trigger_hook( |
|
492 | 497 | username=user.username, |
|
493 | 498 | repo_name=pull_request.target_repo.repo_name, |
|
494 | 499 | repo_alias=target_scm.alias, |
|
495 | 500 | pull_request=pull_request) |
|
496 | 501 | |
|
497 | 502 | def _get_commit_ids(self, pull_request): |
|
498 | 503 | """ |
|
499 | 504 | Return the commit ids of the merged pull request. |
|
500 | 505 | |
|
501 | 506 | This method is not dealing correctly yet with the lack of autoupdates |
|
502 | 507 | nor with the implicit target updates. |
|
503 | 508 | For example: if a commit in the source repo is already in the target it |
|
504 | 509 | will be reported anyways. |
|
505 | 510 | """ |
|
506 | 511 | merge_rev = pull_request.merge_rev |
|
507 | 512 | if merge_rev is None: |
|
508 | 513 | raise ValueError('This pull request was not merged yet') |
|
509 | 514 | |
|
510 | 515 | commit_ids = list(pull_request.revisions) |
|
511 | 516 | if merge_rev not in commit_ids: |
|
512 | 517 | commit_ids.append(merge_rev) |
|
513 | 518 | |
|
514 | 519 | return commit_ids |
|
515 | 520 | |
|
516 | 521 | def merge(self, pull_request, user, extras): |
|
517 | 522 | log.debug("Merging pull request %s", pull_request.pull_request_id) |
|
518 | 523 | merge_state = self._merge_pull_request(pull_request, user, extras) |
|
519 | 524 | if merge_state.executed: |
|
520 | 525 | log.debug( |
|
521 | 526 | "Merge was successful, updating the pull request comments.") |
|
522 | 527 | self._comment_and_close_pr(pull_request, user, merge_state) |
|
523 | self._log_action('user_merged_pull_request', user, pull_request) | |
|
528 | ||
|
529 | self._log_audit_action( | |
|
530 | 'repo.pull_request.merge', | |
|
531 | {'merge_state': merge_state.__dict__}, | |
|
532 | user, pull_request) | |
|
533 | ||
|
524 | 534 | else: |
|
525 | 535 | log.warn("Merge failed, not updating the pull request.") |
|
526 | 536 | return merge_state |
|
527 | 537 | |
|
528 | 538 | def _merge_pull_request(self, pull_request, user, extras): |
|
529 | 539 | target_vcs = pull_request.target_repo.scm_instance() |
|
530 | 540 | source_vcs = pull_request.source_repo.scm_instance() |
|
531 | 541 | target_ref = self._refresh_reference( |
|
532 | 542 | pull_request.target_ref_parts, target_vcs) |
|
533 | 543 | |
|
534 | 544 | message = _( |
|
535 | 545 | 'Merge pull request #%(pr_id)s from ' |
|
536 | 546 | '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % { |
|
537 | 547 | 'pr_id': pull_request.pull_request_id, |
|
538 | 548 | 'source_repo': source_vcs.name, |
|
539 | 549 | 'source_ref_name': pull_request.source_ref_parts.name, |
|
540 | 550 | 'pr_title': pull_request.title |
|
541 | 551 | } |
|
542 | 552 | |
|
543 | 553 | workspace_id = self._workspace_id(pull_request) |
|
544 | 554 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
545 | 555 | |
|
546 | 556 | callback_daemon, extras = prepare_callback_daemon( |
|
547 | 557 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, |
|
548 | 558 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) |
|
549 | 559 | |
|
550 | 560 | with callback_daemon: |
|
551 | 561 | # TODO: johbo: Implement a clean way to run a config_override |
|
552 | 562 | # for a single call. |
|
553 | 563 | target_vcs.config.set( |
|
554 | 564 | 'rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
555 | 565 | merge_state = target_vcs.merge( |
|
556 | 566 | target_ref, source_vcs, pull_request.source_ref_parts, |
|
557 | 567 | workspace_id, user_name=user.username, |
|
558 | 568 | user_email=user.email, message=message, use_rebase=use_rebase) |
|
559 | 569 | return merge_state |
|
560 | 570 | |
|
561 | 571 | def _comment_and_close_pr(self, pull_request, user, merge_state): |
|
562 | 572 | pull_request.merge_rev = merge_state.merge_ref.commit_id |
|
563 | 573 | pull_request.updated_on = datetime.datetime.now() |
|
564 | 574 | |
|
565 | 575 | CommentsModel().create( |
|
566 | 576 | text=unicode(_('Pull request merged and closed')), |
|
567 | 577 | repo=pull_request.target_repo.repo_id, |
|
568 | 578 | user=user.user_id, |
|
569 | 579 | pull_request=pull_request.pull_request_id, |
|
570 | 580 | f_path=None, |
|
571 | 581 | line_no=None, |
|
572 | 582 | closing_pr=True |
|
573 | 583 | ) |
|
574 | 584 | |
|
575 | 585 | Session().add(pull_request) |
|
576 | 586 | Session().flush() |
|
577 | 587 | # TODO: paris: replace invalidation with less radical solution |
|
578 | 588 | ScmModel().mark_for_invalidation( |
|
579 | 589 | pull_request.target_repo.repo_name) |
|
580 | 590 | self._trigger_pull_request_hook(pull_request, user, 'merge') |
|
581 | 591 | |
|
582 | 592 | def has_valid_update_type(self, pull_request): |
|
583 | 593 | source_ref_type = pull_request.source_ref_parts.type |
|
584 | 594 | return source_ref_type in ['book', 'branch', 'tag'] |
|
585 | 595 | |
|
586 | 596 | def update_commits(self, pull_request): |
|
587 | 597 | """ |
|
588 | 598 | Get the updated list of commits for the pull request |
|
589 | 599 | and return the new pull request version and the list |
|
590 | 600 | of commits processed by this update action |
|
591 | 601 | """ |
|
592 | 602 | pull_request = self.__get_pull_request(pull_request) |
|
593 | 603 | source_ref_type = pull_request.source_ref_parts.type |
|
594 | 604 | source_ref_name = pull_request.source_ref_parts.name |
|
595 | 605 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
596 | 606 | |
|
597 | 607 | target_ref_type = pull_request.target_ref_parts.type |
|
598 | 608 | target_ref_name = pull_request.target_ref_parts.name |
|
599 | 609 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
600 | 610 | |
|
601 | 611 | if not self.has_valid_update_type(pull_request): |
|
602 | 612 | log.debug( |
|
603 | 613 | "Skipping update of pull request %s due to ref type: %s", |
|
604 | 614 | pull_request, source_ref_type) |
|
605 | 615 | return UpdateResponse( |
|
606 | 616 | executed=False, |
|
607 | 617 | reason=UpdateFailureReason.WRONG_REF_TYPE, |
|
608 | 618 | old=pull_request, new=None, changes=None, |
|
609 | 619 | source_changed=False, target_changed=False) |
|
610 | 620 | |
|
611 | 621 | # source repo |
|
612 | 622 | source_repo = pull_request.source_repo.scm_instance() |
|
613 | 623 | try: |
|
614 | 624 | source_commit = source_repo.get_commit(commit_id=source_ref_name) |
|
615 | 625 | except CommitDoesNotExistError: |
|
616 | 626 | return UpdateResponse( |
|
617 | 627 | executed=False, |
|
618 | 628 | reason=UpdateFailureReason.MISSING_SOURCE_REF, |
|
619 | 629 | old=pull_request, new=None, changes=None, |
|
620 | 630 | source_changed=False, target_changed=False) |
|
621 | 631 | |
|
622 | 632 | source_changed = source_ref_id != source_commit.raw_id |
|
623 | 633 | |
|
624 | 634 | # target repo |
|
625 | 635 | target_repo = pull_request.target_repo.scm_instance() |
|
626 | 636 | try: |
|
627 | 637 | target_commit = target_repo.get_commit(commit_id=target_ref_name) |
|
628 | 638 | except CommitDoesNotExistError: |
|
629 | 639 | return UpdateResponse( |
|
630 | 640 | executed=False, |
|
631 | 641 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
632 | 642 | old=pull_request, new=None, changes=None, |
|
633 | 643 | source_changed=False, target_changed=False) |
|
634 | 644 | target_changed = target_ref_id != target_commit.raw_id |
|
635 | 645 | |
|
636 | 646 | if not (source_changed or target_changed): |
|
637 | 647 | log.debug("Nothing changed in pull request %s", pull_request) |
|
638 | 648 | return UpdateResponse( |
|
639 | 649 | executed=False, |
|
640 | 650 | reason=UpdateFailureReason.NO_CHANGE, |
|
641 | 651 | old=pull_request, new=None, changes=None, |
|
642 | 652 | source_changed=source_changed, target_changed=target_changed) 
|
643 | 653 | |
|
644 | 654 | change_in_found = 'target repo' if target_changed else 'source repo' |
|
645 | 655 | log.debug('Updating pull request because of change in %s detected', |
|
646 | 656 | change_in_found) |
|
647 | 657 | |
|
648 | 658 | # Finally, an update is needed; in case of a source change 

649 | 659 | # we create a new version, else we just update in place 
|
650 | 660 | if source_changed: |
|
651 | 661 | pull_request_version = self._create_version_from_snapshot(pull_request) |
|
652 | 662 | self._link_comments_to_version(pull_request_version) |
|
653 | 663 | else: |
|
654 | 664 | try: |
|
655 | 665 | ver = pull_request.versions[-1] |
|
656 | 666 | except IndexError: |
|
657 | 667 | ver = None |
|
658 | 668 | |
|
659 | 669 | pull_request.pull_request_version_id = \ |
|
660 | 670 | ver.pull_request_version_id if ver else None |
|
661 | 671 | pull_request_version = pull_request |
|
662 | 672 | |
|
663 | 673 | try: |
|
664 | 674 | if target_ref_type in ('tag', 'branch', 'book'): |
|
665 | 675 | target_commit = target_repo.get_commit(target_ref_name) |
|
666 | 676 | else: |
|
667 | 677 | target_commit = target_repo.get_commit(target_ref_id) |
|
668 | 678 | except CommitDoesNotExistError: |
|
669 | 679 | return UpdateResponse( |
|
670 | 680 | executed=False, |
|
671 | 681 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
672 | 682 | old=pull_request, new=None, changes=None, |
|
673 | 683 | source_changed=source_changed, target_changed=target_changed) |
|
674 | 684 | |
|
675 | 685 | # re-compute commit ids |
|
676 | 686 | old_commit_ids = pull_request.revisions |
|
677 | 687 | pre_load = ["author", "branch", "date", "message"] |
|
678 | 688 | commit_ranges = target_repo.compare( |
|
679 | 689 | target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, |
|
680 | 690 | pre_load=pre_load) |
|
681 | 691 | |
|
682 | 692 | ancestor = target_repo.get_common_ancestor( |
|
683 | 693 | target_commit.raw_id, source_commit.raw_id, source_repo) |
|
684 | 694 | |
|
685 | 695 | pull_request.source_ref = '%s:%s:%s' % ( |
|
686 | 696 | source_ref_type, source_ref_name, source_commit.raw_id) |
|
687 | 697 | pull_request.target_ref = '%s:%s:%s' % ( |
|
688 | 698 | target_ref_type, target_ref_name, ancestor) |
|
689 | 699 | |
|
690 | 700 | pull_request.revisions = [ |
|
691 | 701 | commit.raw_id for commit in reversed(commit_ranges)] |
|
692 | 702 | pull_request.updated_on = datetime.datetime.now() |
|
693 | 703 | Session().add(pull_request) |
|
694 | 704 | new_commit_ids = pull_request.revisions |
|
695 | 705 | |
|
696 | 706 | old_diff_data, new_diff_data = self._generate_update_diffs( |
|
697 | 707 | pull_request, pull_request_version) |
|
698 | 708 | |
|
699 | 709 | # calculate commit and file changes |
|
700 | 710 | changes = self._calculate_commit_id_changes( |
|
701 | 711 | old_commit_ids, new_commit_ids) |
|
702 | 712 | file_changes = self._calculate_file_changes( |
|
703 | 713 | old_diff_data, new_diff_data) |
|
704 | 714 | |
|
705 | 715 | # set comments as outdated if DIFFS changed |
|
706 | 716 | CommentsModel().outdate_comments( |
|
707 | 717 | pull_request, old_diff_data=old_diff_data, |
|
708 | 718 | new_diff_data=new_diff_data) |
|
709 | 719 | |
|
710 | 720 | commit_changes = (changes.added or changes.removed) |
|
711 | 721 | file_node_changes = ( |
|
712 | 722 | file_changes.added or file_changes.modified or file_changes.removed) |
|
713 | 723 | pr_has_changes = commit_changes or file_node_changes |
|
714 | 724 | |
|
715 | 725 | # Add an automatic comment to the pull request, in case |
|
716 | 726 | # anything has changed |
|
717 | 727 | if pr_has_changes: |
|
718 | 728 | update_comment = CommentsModel().create( |
|
719 | 729 | text=self._render_update_message(changes, file_changes), |
|
720 | 730 | repo=pull_request.target_repo, |
|
721 | 731 | user=pull_request.author, |
|
722 | 732 | pull_request=pull_request, |
|
723 | 733 | send_email=False, renderer=DEFAULT_COMMENTS_RENDERER) |
|
724 | 734 | |
|
725 | 735 | # Update status to "Under Review" for added commits |
|
726 | 736 | for commit_id in changes.added: |
|
727 | 737 | ChangesetStatusModel().set_status( |
|
728 | 738 | repo=pull_request.source_repo, |
|
729 | 739 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
730 | 740 | comment=update_comment, |
|
731 | 741 | user=pull_request.author, |
|
732 | 742 | pull_request=pull_request, |
|
733 | 743 | revision=commit_id) |
|
734 | 744 | |
|
735 | 745 | log.debug( |
|
736 | 746 | 'Updated pull request %s, added_ids: %s, common_ids: %s, ' |
|
737 | 747 | 'removed_ids: %s', pull_request.pull_request_id, |
|
738 | 748 | changes.added, changes.common, changes.removed) |
|
739 | 749 | log.debug( |
|
740 | 750 | 'Updated pull request with the following file changes: %s', |
|
741 | 751 | file_changes) |
|
742 | 752 | |
|
743 | 753 | log.info( |
|
744 | 754 | "Updated pull request %s from commit %s to commit %s, " |
|
745 | 755 | "stored new version %s of this pull request.", |
|
746 | 756 | pull_request.pull_request_id, source_ref_id, |
|
747 | 757 | pull_request.source_ref_parts.commit_id, |
|
748 | 758 | pull_request_version.pull_request_version_id) |
|
749 | 759 | Session().commit() |
|
750 | 760 | self._trigger_pull_request_hook( |
|
751 | 761 | pull_request, pull_request.author, 'update') |
|
752 | 762 | |
|
753 | 763 | return UpdateResponse( |
|
754 | 764 | executed=True, reason=UpdateFailureReason.NONE, |
|
755 | 765 | old=pull_request, new=pull_request_version, changes=changes, |
|
756 | 766 | source_changed=source_changed, target_changed=target_changed) |
|
757 | 767 | |
|
758 | 768 | def _create_version_from_snapshot(self, pull_request): |
|
759 | 769 | version = PullRequestVersion() |
|
760 | 770 | version.title = pull_request.title |
|
761 | 771 | version.description = pull_request.description |
|
762 | 772 | version.status = pull_request.status |
|
763 | 773 | version.created_on = datetime.datetime.now() |
|
764 | 774 | version.updated_on = pull_request.updated_on |
|
765 | 775 | version.user_id = pull_request.user_id |
|
766 | 776 | version.source_repo = pull_request.source_repo |
|
767 | 777 | version.source_ref = pull_request.source_ref |
|
768 | 778 | version.target_repo = pull_request.target_repo |
|
769 | 779 | version.target_ref = pull_request.target_ref |
|
770 | 780 | |
|
771 | 781 | version._last_merge_source_rev = pull_request._last_merge_source_rev |
|
772 | 782 | version._last_merge_target_rev = pull_request._last_merge_target_rev |
|
773 | 783 | version._last_merge_status = pull_request._last_merge_status |
|
774 | 784 | version.shadow_merge_ref = pull_request.shadow_merge_ref |
|
775 | 785 | version.merge_rev = pull_request.merge_rev |
|
776 | 786 | version.reviewer_data = pull_request.reviewer_data |
|
777 | 787 | |
|
778 | 788 | version.revisions = pull_request.revisions |
|
779 | 789 | version.pull_request = pull_request |
|
780 | 790 | Session().add(version) |
|
781 | 791 | Session().flush() |
|
782 | 792 | |
|
783 | 793 | return version |
|
784 | 794 | |
|
785 | 795 | def _generate_update_diffs(self, pull_request, pull_request_version): |
|
786 | 796 | |
|
787 | 797 | diff_context = ( |
|
788 | 798 | self.DIFF_CONTEXT + |
|
789 | 799 | CommentsModel.needed_extra_diff_context()) |
|
790 | 800 | |
|
791 | 801 | source_repo = pull_request_version.source_repo |
|
792 | 802 | source_ref_id = pull_request_version.source_ref_parts.commit_id |
|
793 | 803 | target_ref_id = pull_request_version.target_ref_parts.commit_id |
|
794 | 804 | old_diff = self._get_diff_from_pr_or_version( |
|
795 | 805 | source_repo, source_ref_id, target_ref_id, context=diff_context) |
|
796 | 806 | |
|
797 | 807 | source_repo = pull_request.source_repo |
|
798 | 808 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
799 | 809 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
800 | 810 | |
|
801 | 811 | new_diff = self._get_diff_from_pr_or_version( |
|
802 | 812 | source_repo, source_ref_id, target_ref_id, context=diff_context) |
|
803 | 813 | |
|
804 | 814 | old_diff_data = diffs.DiffProcessor(old_diff) |
|
805 | 815 | old_diff_data.prepare() |
|
806 | 816 | new_diff_data = diffs.DiffProcessor(new_diff) |
|
807 | 817 | new_diff_data.prepare() |
|
808 | 818 | |
|
809 | 819 | return old_diff_data, new_diff_data |
|
810 | 820 | |
|
811 | 821 | def _link_comments_to_version(self, pull_request_version): |
|
812 | 822 | """ |
|
813 | 823 | Link all unlinked comments of this pull request to the given version. |
|
814 | 824 | |
|
815 | 825 | :param pull_request_version: The `PullRequestVersion` to which |
|
816 | 826 | the comments shall be linked. |
|
817 | 827 | |
|
818 | 828 | """ |
|
819 | 829 | pull_request = pull_request_version.pull_request |
|
820 | 830 | comments = ChangesetComment.query()\ |
|
821 | 831 | .filter( |
|
822 | 832 | # TODO: johbo: Should we query for the repo at all here? |
|
823 | 833 | # Pending decision on how comments of PRs are to be related |
|
824 | 834 | # to either the source repo, the target repo or no repo at all. |
|
825 | 835 | ChangesetComment.repo_id == pull_request.target_repo.repo_id, |
|
826 | 836 | ChangesetComment.pull_request == pull_request, |
|
827 | 837 | ChangesetComment.pull_request_version == None)\ |
|
828 | 838 | .order_by(ChangesetComment.comment_id.asc()) |
|
829 | 839 | |
|
830 | 840 | # TODO: johbo: Find out why this breaks if it is done in a bulk |
|
831 | 841 | # operation. |
|
832 | 842 | for comment in comments: |
|
833 | 843 | comment.pull_request_version_id = ( |
|
834 | 844 | pull_request_version.pull_request_version_id) |
|
835 | 845 | Session().add(comment) |
|
836 | 846 | |
|
837 | 847 | def _calculate_commit_id_changes(self, old_ids, new_ids): |
|
838 | 848 | added = [x for x in new_ids if x not in old_ids] |
|
839 | 849 | common = [x for x in new_ids if x in old_ids] |
|
840 | 850 | removed = [x for x in old_ids if x not in new_ids] |
|
841 | 851 | total = new_ids |
|
842 | 852 | return ChangeTuple(added, common, removed, total) |
|
843 | 853 | |
|
844 | 854 | def _calculate_file_changes(self, old_diff_data, new_diff_data): |
|
845 | 855 | |
|
846 | 856 | old_files = OrderedDict() |
|
847 | 857 | for diff_data in old_diff_data.parsed_diff: |
|
848 | 858 | old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff']) |
|
849 | 859 | |
|
850 | 860 | added_files = [] |
|
851 | 861 | modified_files = [] |
|
852 | 862 | removed_files = [] |
|
853 | 863 | for diff_data in new_diff_data.parsed_diff: |
|
854 | 864 | new_filename = diff_data['filename'] |
|
855 | 865 | new_hash = md5_safe(diff_data['raw_diff']) |
|
856 | 866 | |
|
857 | 867 | old_hash = old_files.get(new_filename) |
|
858 | 868 | if not old_hash: |
|
859 | 869 | # file is not present in the old diff, which means it was added 
|
860 | 870 | added_files.append(new_filename) |
|
861 | 871 | else: |
|
862 | 872 | if new_hash != old_hash: |
|
863 | 873 | modified_files.append(new_filename) |
|
864 | 874 | # now remove a file from old, since we have seen it already |
|
865 | 875 | del old_files[new_filename] |
|
866 | 876 | |
|
867 | 877 | # removed files is when there are present in old, but not in NEW, |
|
868 | 878 | # since we remove old files that are present in new diff, left-overs |
|
869 | 879 | # if any should be the removed files |
|
870 | 880 | removed_files.extend(old_files.keys()) |
|
871 | 881 | |
|
872 | 882 | return FileChangeTuple(added_files, modified_files, removed_files) |
|
873 | 883 | |
|
874 | 884 | def _render_update_message(self, changes, file_changes): |
|
875 | 885 | """ |
|
876 | 886 | render the message using DEFAULT_COMMENTS_RENDERER (RST renderer), |
|
877 | 887 | so it always looks the same regardless of which default 

878 | 888 | renderer the system is using. 
|
879 | 889 | |
|
880 | 890 | :param changes: changes named tuple |
|
881 | 891 | :param file_changes: file changes named tuple |
|
882 | 892 | |
|
883 | 893 | """ |
|
884 | 894 | new_status = ChangesetStatus.get_status_lbl( |
|
885 | 895 | ChangesetStatus.STATUS_UNDER_REVIEW) |
|
886 | 896 | |
|
887 | 897 | changed_files = ( |
|
888 | 898 | file_changes.added + file_changes.modified + file_changes.removed) |
|
889 | 899 | |
|
890 | 900 | params = { |
|
891 | 901 | 'under_review_label': new_status, |
|
892 | 902 | 'added_commits': changes.added, |
|
893 | 903 | 'removed_commits': changes.removed, |
|
894 | 904 | 'changed_files': changed_files, |
|
895 | 905 | 'added_files': file_changes.added, |
|
896 | 906 | 'modified_files': file_changes.modified, |
|
897 | 907 | 'removed_files': file_changes.removed, |
|
898 | 908 | } |
|
899 | 909 | renderer = RstTemplateRenderer() |
|
900 | 910 | return renderer.render('pull_request_update.mako', **params) |
|
901 | 911 | |
|
902 | def edit(self, pull_request, title, description): | |
|
912 | def edit(self, pull_request, title, description, user): | |
|
903 | 913 | pull_request = self.__get_pull_request(pull_request) |
|
914 | old_data = pull_request.get_api_data(with_merge_state=False) | |
|
904 | 915 | if pull_request.is_closed(): |
|
905 | 916 | raise ValueError('This pull request is closed') |
|
906 | 917 | if title: |
|
907 | 918 | pull_request.title = title |
|
908 | 919 | pull_request.description = description |
|
909 | 920 | pull_request.updated_on = datetime.datetime.now() |
|
910 | 921 | Session().add(pull_request) |
|
922 | self._log_audit_action( | |
|
923 | 'repo.pull_request.edit', {'old_data': old_data}, | |
|
924 | user, pull_request) | |
|
911 | 925 | |
|
912 | def update_reviewers(self, pull_request, reviewer_data): | |
|
926 | def update_reviewers(self, pull_request, reviewer_data, user): | |
|
913 | 927 | """ |
|
914 | 928 | Update the reviewers in the pull request |
|
915 | 929 | |
|
916 | 930 | :param pull_request: the pr to update |
|
917 | 931 | :param reviewer_data: list of tuples |
|
918 | 932 | [(user, ['reason1', 'reason2'], mandatory_flag)] |
|
919 | 933 | """ |
|
920 | 934 | |
|
921 | 935 | reviewers = {} |
|
922 | 936 | for user_id, reasons, mandatory in reviewer_data: |
|
923 | 937 | if isinstance(user_id, (int, basestring)): |
|
924 | 938 | user_id = self._get_user(user_id).user_id |
|
925 | 939 | reviewers[user_id] = { |
|
926 | 940 | 'reasons': reasons, 'mandatory': mandatory} |
|
927 | 941 | |
|
928 | 942 | reviewers_ids = set(reviewers.keys()) |
|
929 | 943 | pull_request = self.__get_pull_request(pull_request) |
|
930 | 944 | current_reviewers = PullRequestReviewers.query()\ |
|
931 | 945 | .filter(PullRequestReviewers.pull_request == |
|
932 | 946 | pull_request).all() |
|
933 | 947 | current_reviewers_ids = set([x.user.user_id for x in current_reviewers]) |
|
934 | 948 | |
|
935 | 949 | ids_to_add = reviewers_ids.difference(current_reviewers_ids) |
|
936 | 950 | ids_to_remove = current_reviewers_ids.difference(reviewers_ids) |
|
937 | 951 | |
|
938 | 952 | log.debug("Adding %s reviewers", ids_to_add) |
|
939 | 953 | log.debug("Removing %s reviewers", ids_to_remove) |
|
940 | 954 | changed = False |
|
941 | 955 | for uid in ids_to_add: |
|
942 | 956 | changed = True |
|
943 | 957 | _usr = self._get_user(uid) |
|
944 | 958 | reviewer = PullRequestReviewers() |
|
945 | 959 | reviewer.user = _usr |
|
946 | 960 | reviewer.pull_request = pull_request |
|
947 | 961 | reviewer.reasons = reviewers[uid]['reasons'] |
|
948 | 962 | # NOTE(marcink): mandatory shouldn't be changed now |
|
949 | #reviewer.mandatory = reviewers[uid]['reasons'] | |
|
963 | # reviewer.mandatory = reviewers[uid]['reasons'] | |
|
950 | 964 | Session().add(reviewer) |
|
965 | self._log_audit_action( | |
|
966 | 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()}, | |
|
967 | user, pull_request) | |
|
951 | 968 | |
|
952 | 969 | for uid in ids_to_remove: |
|
953 | 970 | changed = True |
|
954 | 971 | reviewers = PullRequestReviewers.query()\ |
|
955 | 972 | .filter(PullRequestReviewers.user_id == uid, |
|
956 | 973 | PullRequestReviewers.pull_request == pull_request)\ |
|
957 | 974 | .all() |
|
958 | 975 | # use .all() in case we accidentally added the same person twice |
|
959 | 976 | # this CAN happen due to the lack of DB checks |
|
960 | 977 | for obj in reviewers: |
|
978 | old_data = obj.get_dict() | |
|
961 | 979 | Session().delete(obj) |
|
980 | self._log_audit_action( | |
|
981 | 'repo.pull_request.reviewer.delete', | |
|
982 | {'old_data': old_data}, user, pull_request) | |
|
962 | 983 | |
|
963 | 984 | if changed: |
|
964 | 985 | pull_request.updated_on = datetime.datetime.now() |
|
965 | 986 | Session().add(pull_request) |
|
966 | 987 | |
|
967 | 988 | self.notify_reviewers(pull_request, ids_to_add) |
|
968 | 989 | return ids_to_add, ids_to_remove |
|
969 | 990 | |
|
970 | 991 | def get_url(self, pull_request, request=None, permalink=False): |
|
971 | 992 | if not request: |
|
972 | 993 | request = get_current_request() |
|
973 | 994 | |
|
974 | 995 | if permalink: |
|
975 | 996 | return request.route_url( |
|
976 | 997 | 'pull_requests_global', |
|
977 | 998 | pull_request_id=pull_request.pull_request_id,) |
|
978 | 999 | else: |
|
979 | 1000 | return request.route_url( |
|
980 | 1001 | 'pullrequest_show', |
|
981 | 1002 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
982 | 1003 | pull_request_id=pull_request.pull_request_id,) |
|
983 | 1004 | |
|
984 | 1005 | def get_shadow_clone_url(self, pull_request): |
|
985 | 1006 | """ |
|
986 | 1007 | Returns qualified url pointing to the shadow repository. If this pull |
|
987 | 1008 | request is closed there is no shadow repository and ``None`` will be |
|
988 | 1009 | returned. |
|
989 | 1010 | """ |
|
990 | 1011 | if pull_request.is_closed(): |
|
991 | 1012 | return None |
|
992 | 1013 | else: |
|
993 | 1014 | pr_url = urllib.unquote(self.get_url(pull_request)) |
|
994 | 1015 | return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url)) |
|
995 | 1016 | |
|
996 | 1017 | def notify_reviewers(self, pull_request, reviewers_ids): |
|
997 | 1018 | # notification to reviewers |
|
998 | 1019 | if not reviewers_ids: |
|
999 | 1020 | return |
|
1000 | 1021 | |
|
1001 | 1022 | pull_request_obj = pull_request |
|
1002 | 1023 | # get the current participants of this pull request |
|
1003 | 1024 | recipients = reviewers_ids |
|
1004 | 1025 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST |
|
1005 | 1026 | |
|
1006 | 1027 | pr_source_repo = pull_request_obj.source_repo |
|
1007 | 1028 | pr_target_repo = pull_request_obj.target_repo |
|
1008 | 1029 | |
|
1009 | 1030 | pr_url = h.url( |
|
1010 | 1031 | 'pullrequest_show', |
|
1011 | 1032 | repo_name=pr_target_repo.repo_name, |
|
1012 | 1033 | pull_request_id=pull_request_obj.pull_request_id, |
|
1013 | 1034 | qualified=True,) |
|
1014 | 1035 | |
|
1015 | 1036 | # set some variables for email notification |
|
1016 | 1037 | pr_target_repo_url = h.route_url( |
|
1017 | 1038 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
1018 | 1039 | |
|
1019 | 1040 | pr_source_repo_url = h.route_url( |
|
1020 | 1041 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
1021 | 1042 | |
|
1022 | 1043 | # pull request specifics |
|
1023 | 1044 | pull_request_commits = [ |
|
1024 | 1045 | (x.raw_id, x.message) |
|
1025 | 1046 | for x in map(pr_source_repo.get_commit, pull_request.revisions)] |
|
1026 | 1047 | |
|
1027 | 1048 | kwargs = { |
|
1028 | 1049 | 'user': pull_request.author, |
|
1029 | 1050 | 'pull_request': pull_request_obj, |
|
1030 | 1051 | 'pull_request_commits': pull_request_commits, |
|
1031 | 1052 | |
|
1032 | 1053 | 'pull_request_target_repo': pr_target_repo, |
|
1033 | 1054 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
1034 | 1055 | |
|
1035 | 1056 | 'pull_request_source_repo': pr_source_repo, |
|
1036 | 1057 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
1037 | 1058 | |
|
1038 | 1059 | 'pull_request_url': pr_url, |
|
1039 | 1060 | } |
|
1040 | 1061 | |
|
1041 | 1062 | # pre-generate the subject for notification itself |
|
1042 | 1063 | (subject, |
|
1043 | 1064 | _h, _e, # we don't care about those |
|
1044 | 1065 | body_plaintext) = EmailNotificationModel().render_email( |
|
1045 | 1066 | notification_type, **kwargs) |
|
1046 | 1067 | |
|
1047 | 1068 | # create notification objects, and emails |
|
1048 | 1069 | NotificationModel().create( |
|
1049 | 1070 | created_by=pull_request.author, |
|
1050 | 1071 | notification_subject=subject, |
|
1051 | 1072 | notification_body=body_plaintext, |
|
1052 | 1073 | notification_type=notification_type, |
|
1053 | 1074 | recipients=recipients, |
|
1054 | 1075 | email_kwargs=kwargs, |
|
1055 | 1076 | ) |
|
1056 | 1077 | |
|
1057 | def delete(self, pull_request): | |
|
1078 | def delete(self, pull_request, user): | |
|
1058 | 1079 | pull_request = self.__get_pull_request(pull_request) |
|
1080 | old_data = pull_request.get_api_data(with_merge_state=False) | |
|
1059 | 1081 | self._cleanup_merge_workspace(pull_request) |
|
1082 | self._log_audit_action( | |
|
1083 | 'repo.pull_request.delete', {'old_data': old_data}, | |
|
1084 | user, pull_request) | |
|
1060 | 1085 | Session().delete(pull_request) |
|
1061 | 1086 | |
|
1062 | 1087 | def close_pull_request(self, pull_request, user): |
|
1063 | 1088 | pull_request = self.__get_pull_request(pull_request) |
|
1064 | 1089 | self._cleanup_merge_workspace(pull_request) |
|
1065 | 1090 | pull_request.status = PullRequest.STATUS_CLOSED |
|
1066 | 1091 | pull_request.updated_on = datetime.datetime.now() |
|
1067 | 1092 | Session().add(pull_request) |
|
1068 | 1093 | self._trigger_pull_request_hook( |
|
1069 | 1094 | pull_request, pull_request.author, 'close') |
|
1070 | self._log_action('user_closed_pull_request', user, pull_request) | |
|
1095 | self._log_audit_action( | |
|
1096 | 'repo.pull_request.close', {}, user, pull_request) | |
|
1071 | 1097 | |
|
1072 | 1098 | def close_pull_request_with_comment( |
|
1073 | 1099 | self, pull_request, user, repo, message=None): |
|
1074 | 1100 | |
|
1075 | 1101 | pull_request_review_status = pull_request.calculated_review_status() |
|
1076 | 1102 | |
|
1077 | 1103 | if pull_request_review_status == ChangesetStatus.STATUS_APPROVED: |
|
1078 | 1104 | # approved only if we have voting consent |
|
1079 | 1105 | status = ChangesetStatus.STATUS_APPROVED |
|
1080 | 1106 | else: |
|
1081 | 1107 | status = ChangesetStatus.STATUS_REJECTED |
|
1082 | 1108 | status_lbl = ChangesetStatus.get_status_lbl(status) |
|
1083 | 1109 | |
|
1084 | 1110 | default_message = ( |
|
1085 | 1111 | _('Closing with status change {transition_icon} {status}.') |
|
1086 | 1112 | ).format(transition_icon='>', status=status_lbl) |
|
1087 | 1113 | text = message or default_message |
|
1088 | 1114 | |
|
1089 | 1115 | # create a comment, and link it to new status |
|
1090 | 1116 | comment = CommentsModel().create( |
|
1091 | 1117 | text=text, |
|
1092 | 1118 | repo=repo.repo_id, |
|
1093 | 1119 | user=user.user_id, |
|
1094 | 1120 | pull_request=pull_request.pull_request_id, |
|
1095 | 1121 | status_change=status_lbl, |
|
1096 | 1122 | status_change_type=status, |
|
1097 | 1123 | closing_pr=True |
|
1098 | 1124 | ) |
|
1099 | 1125 | |
|
1100 | 1126 | # calculate old status before we change it |
|
1101 | 1127 | old_calculated_status = pull_request.calculated_review_status() |
|
1102 | 1128 | ChangesetStatusModel().set_status( |
|
1103 | 1129 | repo.repo_id, |
|
1104 | 1130 | status, |
|
1105 | 1131 | user.user_id, |
|
1106 | 1132 | comment=comment, |
|
1107 | 1133 | pull_request=pull_request.pull_request_id |
|
1108 | 1134 | ) |
|
1109 | 1135 | |
|
1110 | 1136 | Session().flush() |
|
1111 | 1137 | events.trigger(events.PullRequestCommentEvent(pull_request, comment)) |
|
1112 | 1138 | # we now calculate the status of pull request again, and based on that |
|
1113 | 1139 | # calculation trigger status change. This might happen in cases |
|
1114 | 1140 | # where a non-reviewer admin closes a PR, which means their vote doesn't 

1115 | 1141 | # change the status, while if they are a reviewer this might change it. 
|
1116 | 1142 | calculated_status = pull_request.calculated_review_status() |
|
1117 | 1143 | if old_calculated_status != calculated_status: |
|
1118 | 1144 | self._trigger_pull_request_hook( |
|
1119 | 1145 | pull_request, user, 'review_status_change') |
|
1120 | 1146 | |
|
1121 | 1147 | # finally close the PR |
|
1122 | 1148 | PullRequestModel().close_pull_request( |
|
1123 | 1149 | pull_request.pull_request_id, user) |
|
1124 | 1150 | |
|
1125 | 1151 | return comment, status |
|
1126 | 1152 | |
|
1127 | 1153 | def merge_status(self, pull_request): |
|
1128 | 1154 | if not self._is_merge_enabled(pull_request): |
|
1129 | 1155 | return False, _('Server-side pull request merging is disabled.') |
|
1130 | 1156 | if pull_request.is_closed(): |
|
1131 | 1157 | return False, _('This pull request is closed.') |
|
1132 | 1158 | merge_possible, msg = self._check_repo_requirements( |
|
1133 | 1159 | target=pull_request.target_repo, source=pull_request.source_repo) |
|
1134 | 1160 | if not merge_possible: |
|
1135 | 1161 | return merge_possible, msg |
|
1136 | 1162 | |
|
1137 | 1163 | try: |
|
1138 | 1164 | resp = self._try_merge(pull_request) |
|
1139 | 1165 | log.debug("Merge response: %s", resp) |
|
1140 | 1166 | status = resp.possible, self.merge_status_message( |
|
1141 | 1167 | resp.failure_reason) |
|
1142 | 1168 | except NotImplementedError: |
|
1143 | 1169 | status = False, _('Pull request merging is not supported.') |
|
1144 | 1170 | |
|
1145 | 1171 | return status |
|
1146 | 1172 | |
|
1147 | 1173 | def _check_repo_requirements(self, target, source): |
|
1148 | 1174 | """ |
|
1149 | 1175 | Check if `target` and `source` have compatible requirements. |
|
1150 | 1176 | |
|
1151 | 1177 | Currently this is just checking for largefiles. |
|
1152 | 1178 | """ |
|
1153 | 1179 | target_has_largefiles = self._has_largefiles(target) |
|
1154 | 1180 | source_has_largefiles = self._has_largefiles(source) |
|
1155 | 1181 | merge_possible = True |
|
1156 | 1182 | message = u'' |
|
1157 | 1183 | |
|
1158 | 1184 | if target_has_largefiles != source_has_largefiles: |
|
1159 | 1185 | merge_possible = False |
|
1160 | 1186 | if source_has_largefiles: |
|
1161 | 1187 | message = _( |
|
1162 | 1188 | 'Target repository large files support is disabled.') |
|
1163 | 1189 | else: |
|
1164 | 1190 | message = _( |
|
1165 | 1191 | 'Source repository large files support is disabled.') |
|
1166 | 1192 | |
|
1167 | 1193 | return merge_possible, message |
|
1168 | 1194 | |
|
1169 | 1195 | def _has_largefiles(self, repo): |
|
1170 | 1196 | largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings( |
|
1171 | 1197 | 'extensions', 'largefiles') |
|
1172 | 1198 | return largefiles_ui and largefiles_ui[0].active |
|
1173 | 1199 | |
|
1174 | 1200 | def _try_merge(self, pull_request): |
|
1175 | 1201 | """ |
|
1176 | 1202 | Try to merge the pull request and return the merge status. |
|
1177 | 1203 | """ |
|
1178 | 1204 | log.debug( |
|
1179 | 1205 | "Trying out if the pull request %s can be merged.", |
|
1180 | 1206 | pull_request.pull_request_id) |
|
1181 | 1207 | target_vcs = pull_request.target_repo.scm_instance() |
|
1182 | 1208 | |
|
1183 | 1209 | # Refresh the target reference. |
|
1184 | 1210 | try: |
|
1185 | 1211 | target_ref = self._refresh_reference( |
|
1186 | 1212 | pull_request.target_ref_parts, target_vcs) |
|
1187 | 1213 | except CommitDoesNotExistError: |
|
1188 | 1214 | merge_state = MergeResponse( |
|
1189 | 1215 | False, False, None, MergeFailureReason.MISSING_TARGET_REF) |
|
1190 | 1216 | return merge_state |
|
1191 | 1217 | |
|
1192 | 1218 | target_locked = pull_request.target_repo.locked |
|
1193 | 1219 | if target_locked and target_locked[0]: |
|
1194 | 1220 | log.debug("The target repository is locked.") |
|
1195 | 1221 | merge_state = MergeResponse( |
|
1196 | 1222 | False, False, None, MergeFailureReason.TARGET_IS_LOCKED) |
|
1197 | 1223 | elif self._needs_merge_state_refresh(pull_request, target_ref): |
|
1198 | 1224 | log.debug("Refreshing the merge status of the repository.") |
|
1199 | 1225 | merge_state = self._refresh_merge_state( |
|
1200 | 1226 | pull_request, target_vcs, target_ref) |
|
1201 | 1227 | else: |
|
1202 | 1228 | possible = pull_request.\ |
|
1203 | 1229 | _last_merge_status == MergeFailureReason.NONE |
|
1204 | 1230 | merge_state = MergeResponse( |
|
1205 | 1231 | possible, False, None, pull_request._last_merge_status) |
|
1206 | 1232 | |
|
1207 | 1233 | return merge_state |
|
1208 | 1234 | |
|
1209 | 1235 | def _refresh_reference(self, reference, vcs_repository): |
|
1210 | 1236 | if reference.type in ('branch', 'book'): |
|
1211 | 1237 | name_or_id = reference.name |
|
1212 | 1238 | else: |
|
1213 | 1239 | name_or_id = reference.commit_id |
|
1214 | 1240 | refreshed_commit = vcs_repository.get_commit(name_or_id) |
|
1215 | 1241 | refreshed_reference = Reference( |
|
1216 | 1242 | reference.type, reference.name, refreshed_commit.raw_id) |
|
1217 | 1243 | return refreshed_reference |
|
1218 | 1244 | |
|
1219 | 1245 | def _needs_merge_state_refresh(self, pull_request, target_reference): |
|
1220 | 1246 | return not ( 
|
1221 | 1247 | pull_request.revisions and |
|
1222 | 1248 | pull_request.revisions[0] == pull_request._last_merge_source_rev and |
|
1223 | 1249 | target_reference.commit_id == pull_request._last_merge_target_rev) |
|
1224 | 1250 | |
|
1225 | 1251 | def _refresh_merge_state(self, pull_request, target_vcs, target_reference): |
|
1226 | 1252 | workspace_id = self._workspace_id(pull_request) |
|
1227 | 1253 | source_vcs = pull_request.source_repo.scm_instance() |
|
1228 | 1254 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
1229 | 1255 | merge_state = target_vcs.merge( |
|
1230 | 1256 | target_reference, source_vcs, pull_request.source_ref_parts, |
|
1231 | 1257 | workspace_id, dry_run=True, use_rebase=use_rebase) |
|
1232 | 1258 | |
|
1233 | 1259 | # Do not store the response if there was an unknown error. |
|
1234 | 1260 | if merge_state.failure_reason != MergeFailureReason.UNKNOWN: |
|
1235 | 1261 | pull_request._last_merge_source_rev = \ |
|
1236 | 1262 | pull_request.source_ref_parts.commit_id |
|
1237 | 1263 | pull_request._last_merge_target_rev = target_reference.commit_id |
|
1238 | 1264 | pull_request._last_merge_status = merge_state.failure_reason |
|
1239 | 1265 | pull_request.shadow_merge_ref = merge_state.merge_ref |
|
1240 | 1266 | Session().add(pull_request) |
|
1241 | 1267 | Session().commit() |
|
1242 | 1268 | |
|
1243 | 1269 | return merge_state |
|
1244 | 1270 | |
|
1245 | 1271 | def _workspace_id(self, pull_request): |
|
1246 | 1272 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1247 | 1273 | return workspace_id |
|
1248 | 1274 | |
|
1249 | 1275 | def merge_status_message(self, status_code): |
|
1250 | 1276 | """ |
|
1251 | 1277 | Return a human friendly error message for the given merge status code. |
|
1252 | 1278 | """ |
|
1253 | 1279 | return self.MERGE_STATUS_MESSAGES[status_code] |
|
1254 | 1280 | |
|
1255 | 1281 | def generate_repo_data(self, repo, commit_id=None, branch=None, |
|
1256 | 1282 | bookmark=None): |
|
1257 | 1283 | all_refs, selected_ref = \ |
|
1258 | 1284 | self._get_repo_pullrequest_sources( |
|
1259 | 1285 | repo.scm_instance(), commit_id=commit_id, |
|
1260 | 1286 | branch=branch, bookmark=bookmark) |
|
1261 | 1287 | |
|
1262 | 1288 | refs_select2 = [] |
|
1263 | 1289 | for element in all_refs: |
|
1264 | 1290 | children = [{'id': x[0], 'text': x[1]} for x in element[0]] |
|
1265 | 1291 | refs_select2.append({'text': element[1], 'children': children}) |
|
1266 | 1292 | |
|
1267 | 1293 | return { |
|
1268 | 1294 | 'user': { |
|
1269 | 1295 | 'user_id': repo.user.user_id, |
|
1270 | 1296 | 'username': repo.user.username, |
|
1271 | 1297 | 'firstname': repo.user.firstname, |
|
1272 | 1298 | 'lastname': repo.user.lastname, |
|
1273 | 1299 | 'gravatar_link': h.gravatar_url(repo.user.email, 14), |
|
1274 | 1300 | }, |
|
1275 | 1301 | 'description': h.chop_at_smart(repo.description, '\n'), |
|
1276 | 1302 | 'refs': { |
|
1277 | 1303 | 'all_refs': all_refs, |
|
1278 | 1304 | 'selected_ref': selected_ref, |
|
1279 | 1305 | 'select2_refs': refs_select2 |
|
1280 | 1306 | } |
|
1281 | 1307 | } |
|
1282 | 1308 | |
|
1283 | 1309 | def generate_pullrequest_title(self, source, source_ref, target): |
|
1284 | 1310 | return u'{source}#{at_ref} to {target}'.format( |
|
1285 | 1311 | source=source, |
|
1286 | 1312 | at_ref=source_ref, |
|
1287 | 1313 | target=target, |
|
1288 | 1314 | ) |
|
1289 | 1315 | |
|
1290 | 1316 | def _cleanup_merge_workspace(self, pull_request): |
|
1291 | 1317 | # Merging related cleanup |
|
1292 | 1318 | target_scm = pull_request.target_repo.scm_instance() |
|
1293 | 1319 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1294 | 1320 | |
|
1295 | 1321 | try: |
|
1296 | 1322 | target_scm.cleanup_merge_workspace(workspace_id) |
|
1297 | 1323 | except NotImplementedError: |
|
1298 | 1324 | pass |
|
1299 | 1325 | |
|
1300 | 1326 | def _get_repo_pullrequest_sources( |
|
1301 | 1327 | self, repo, commit_id=None, branch=None, bookmark=None): |
|
1302 | 1328 | """ |
|
1303 | 1329 | Return a structure with repo's interesting commits, suitable for |
|
1304 | 1330 | the selectors in pullrequest controller |
|
1305 | 1331 | |
|
1306 | 1332 | :param commit_id: a commit that must be in the list somehow |
|
1307 | 1333 | and selected by default |
|
1308 | 1334 | :param branch: a branch that must be in the list and selected |
|
1309 | 1335 | by default - even if closed |
|
1310 | 1336 | :param bookmark: a bookmark that must be in the list and selected |
|
1311 | 1337 | """ |
|
1312 | 1338 | |
|
1313 | 1339 | commit_id = safe_str(commit_id) if commit_id else None |
|
1314 | 1340 | branch = safe_str(branch) if branch else None |
|
1315 | 1341 | bookmark = safe_str(bookmark) if bookmark else None |
|
1316 | 1342 | |
|
1317 | 1343 | selected = None |
|
1318 | 1344 | |
|
1319 | 1345 | # order matters: first source that has commit_id in it will be selected |
|
1320 | 1346 | sources = [] |
|
1321 | 1347 | sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark)) |
|
1322 | 1348 | sources.append(('branch', repo.branches.items(), _('Branches'), branch)) |
|
1323 | 1349 | |
|
1324 | 1350 | if commit_id: |
|
1325 | 1351 | ref_commit = (h.short_id(commit_id), commit_id) |
|
1326 | 1352 | sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id)) |
|
1327 | 1353 | |
|
1328 | 1354 | sources.append( |
|
1329 | 1355 | ('branch', repo.branches_closed.items(), _('Closed Branches'), branch), |
|
1330 | 1356 | ) |
|
1331 | 1357 | |
|
1332 | 1358 | groups = [] |
|
1333 | 1359 | for group_key, ref_list, group_name, match in sources: |
|
1334 | 1360 | group_refs = [] |
|
1335 | 1361 | for ref_name, ref_id in ref_list: |
|
1336 | 1362 | ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id) |
|
1337 | 1363 | group_refs.append((ref_key, ref_name)) |
|
1338 | 1364 | |
|
1339 | 1365 | if not selected: |
|
1340 | 1366 | if set([commit_id, match]) & set([ref_id, ref_name]): |
|
1341 | 1367 | selected = ref_key |
|
1342 | 1368 | |
|
1343 | 1369 | if group_refs: |
|
1344 | 1370 | groups.append((group_refs, group_name)) |
|
1345 | 1371 | |
|
1346 | 1372 | if not selected: |
|
1347 | 1373 | ref = commit_id or branch or bookmark |
|
1348 | 1374 | if ref: |
|
1349 | 1375 | raise CommitDoesNotExistError( |
|
1350 | 1376 | 'No commit refs could be found matching: %s' % ref) |
|
1351 | 1377 | elif repo.DEFAULT_BRANCH_NAME in repo.branches: |
|
1352 | 1378 | selected = 'branch:%s:%s' % ( |
|
1353 | 1379 | repo.DEFAULT_BRANCH_NAME, |
|
1354 | 1380 | repo.branches[repo.DEFAULT_BRANCH_NAME] |
|
1355 | 1381 | ) |
|
1356 | 1382 | elif repo.commit_ids: |
|
1357 | 1383 | rev = repo.commit_ids[0] |
|
1358 | 1384 | selected = 'rev:%s:%s' % (rev, rev) |
|
1359 | 1385 | else: |
|
1360 | 1386 | raise EmptyRepositoryError() |
|
1361 | 1387 | return groups, selected |
|
1362 | 1388 | |
|
1363 | 1389 | def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT): |
|
1364 | 1390 | return self._get_diff_from_pr_or_version( |
|
1365 | 1391 | source_repo, source_ref_id, target_ref_id, context=context) |
|
1366 | 1392 | |
|
1367 | 1393 | def _get_diff_from_pr_or_version( |
|
1368 | 1394 | self, source_repo, source_ref_id, target_ref_id, context): |
|
1369 | 1395 | target_commit = source_repo.get_commit( |
|
1370 | 1396 | commit_id=safe_str(target_ref_id)) |
|
1371 | 1397 | source_commit = source_repo.get_commit( |
|
1372 | 1398 | commit_id=safe_str(source_ref_id)) |
|
1373 | 1399 | if isinstance(source_repo, Repository): |
|
1374 | 1400 | vcs_repo = source_repo.scm_instance() |
|
1375 | 1401 | else: |
|
1376 | 1402 | vcs_repo = source_repo |
|
1377 | 1403 | |
|
1378 | 1404 | # TODO: johbo: In the context of an update, we cannot reach |
|
1379 | 1405 | # the old commit anymore with our normal mechanisms. It needs |
|
1380 | 1406 | # some sort of special support in the vcs layer to avoid this |
|
1381 | 1407 | # workaround. |
|
1382 | 1408 | if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and |
|
1383 | 1409 | vcs_repo.alias == 'git'): |
|
1384 | 1410 | source_commit.raw_id = safe_str(source_ref_id) |
|
1385 | 1411 | |
|
1386 | 1412 | log.debug('calculating diff between ' |
|
1387 | 1413 | 'source_ref:%s and target_ref:%s for repo `%s`', |
|
1388 | 1414 | source_ref_id, target_ref_id, 
|
1389 | 1415 | safe_unicode(vcs_repo.path)) |
|
1390 | 1416 | |
|
1391 | 1417 | vcs_diff = vcs_repo.get_diff( |
|
1392 | 1418 | commit1=target_commit, commit2=source_commit, context=context) |
|
1393 | 1419 | return vcs_diff |
|
1394 | 1420 | |
|
1395 | 1421 | def _is_merge_enabled(self, pull_request): |
|
1396 | 1422 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
1397 | 1423 | settings = settings_model.get_general_settings() |
|
1398 | 1424 | return settings.get('rhodecode_pr_merge_enabled', False) |
|
1399 | 1425 | |
|
1400 | 1426 | def _use_rebase_for_merging(self, pull_request): |
|
1401 | 1427 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
1402 | 1428 | settings = settings_model.get_general_settings() |
|
1403 | 1429 | return settings.get('rhodecode_hg_use_rebase_for_merging', False) |
|
1404 | 1430 | |
|
1405 | def _log_action(self, action, user, pull_request): | 

1406 | action_logger( | 

1407 | user, | 

1408 | '{action}:{pr_id}'.format( | |
|
1409 | action=action, pr_id=pull_request.pull_request_id), | |
|
1410 | pull_request.target_repo) | |
|
1431 | def _log_audit_action(self, action, action_data, user, pull_request): | |
|
1432 | audit_logger.store( | |
|
1433 | action=action, | |
|
1434 | action_data=action_data, | |
|
1435 | user=user, | |
|
1436 | repo=pull_request.target_repo) | |
|
1411 | 1437 | |
|
1412 | 1438 | def get_reviewer_functions(self): |
|
1413 | 1439 | """ |
|
1414 | 1440 | Fetches functions for validation and fetching default reviewers. |
|
1415 | 1441 | If available we use the EE package, else we fall back to the CE 

1416 | 1442 | package functions. 
|
1417 | 1443 | """ |
|
1418 | 1444 | try: |
|
1419 | 1445 | from rc_reviewers.utils import get_default_reviewers_data |
|
1420 | 1446 | from rc_reviewers.utils import validate_default_reviewers |
|
1421 | 1447 | except ImportError: |
|
1422 | 1448 | from rhodecode.apps.repository.utils import \ |
|
1423 | 1449 | get_default_reviewers_data |
|
1424 | 1450 | from rhodecode.apps.repository.utils import \ |
|
1425 | 1451 | validate_default_reviewers |
|
1426 | 1452 | |
|
1427 | 1453 | return get_default_reviewers_data, validate_default_reviewers |
|
1428 | 1454 | |
|
1429 | 1455 | |
|
1430 | 1456 | class MergeCheck(object): |
|
1431 | 1457 | """ |
|
1432 | 1458 | Performs merge checks and returns a check object which stores information 

1433 | 1459 | about merge errors and merge conditions 
|
1434 | 1460 | """ |
|
1435 | 1461 | TODO_CHECK = 'todo' |
|
1436 | 1462 | PERM_CHECK = 'perm' |
|
1437 | 1463 | REVIEW_CHECK = 'review' |
|
1438 | 1464 | MERGE_CHECK = 'merge' |
|
1439 | 1465 | |
|
1440 | 1466 | def __init__(self): |
|
1441 | 1467 | self.review_status = None |
|
1442 | 1468 | self.merge_possible = None |
|
1443 | 1469 | self.merge_msg = '' |
|
1444 | 1470 | self.failed = None |
|
1445 | 1471 | self.errors = [] |
|
1446 | 1472 | self.error_details = OrderedDict() |
|
1447 | 1473 | |
|
1448 | 1474 | def push_error(self, error_type, message, error_key, details): |
|
1449 | 1475 | self.failed = True |
|
1450 | 1476 | self.errors.append([error_type, message]) |
|
1451 | 1477 | self.error_details[error_key] = dict( |
|
1452 | 1478 | details=details, |
|
1453 | 1479 | error_type=error_type, |
|
1454 | 1480 | message=message |
|
1455 | 1481 | ) |
|
1456 | 1482 | |
|
1457 | 1483 | @classmethod |
|
1458 | 1484 | def validate(cls, pull_request, user, fail_early=False, translator=None): |
|
1459 | 1485 | # if migrated to pyramid... |
|
1460 | 1486 | # _ = lambda: translator or _ # use passed in translator if any |
|
1461 | 1487 | |
|
1462 | 1488 | merge_check = cls() |
|
1463 | 1489 | |
|
1464 | 1490 | # permissions to merge |
|
1465 | 1491 | user_allowed_to_merge = PullRequestModel().check_user_merge( |
|
1466 | 1492 | pull_request, user) |
|
1467 | 1493 | if not user_allowed_to_merge: |
|
1468 | 1494 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
1469 | 1495 | |
|
1470 | 1496 | msg = _('User `{}` not allowed to perform merge.').format(user.username) |
|
1471 | 1497 | merge_check.push_error('error', msg, cls.PERM_CHECK, user.username) |
|
1472 | 1498 | if fail_early: |
|
1473 | 1499 | return merge_check |
|
1474 | 1500 | |
|
1475 | 1501 | # review status, must be always present |
|
1476 | 1502 | review_status = pull_request.calculated_review_status() |
|
1477 | 1503 | merge_check.review_status = review_status |
|
1478 | 1504 | |
|
1479 | 1505 | status_approved = review_status == ChangesetStatus.STATUS_APPROVED |
|
1480 | 1506 | if not status_approved: |
|
1481 | 1507 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
1482 | 1508 | |
|
1483 | 1509 | msg = _('Pull request reviewer approval is pending.') |
|
1484 | 1510 | |
|
1485 | 1511 | merge_check.push_error( |
|
1486 | 1512 | 'warning', msg, cls.REVIEW_CHECK, review_status) |
|
1487 | 1513 | |
|
1488 | 1514 | if fail_early: |
|
1489 | 1515 | return merge_check |
|
1490 | 1516 | |
|
1491 | 1517 | # left over TODOs |
|
1492 | 1518 | todos = CommentsModel().get_unresolved_todos(pull_request) |
|
1493 | 1519 | if todos: |
|
1494 | 1520 | log.debug("MergeCheck: cannot merge, {} " |
|
1495 | 1521 | "unresolved todos left.".format(len(todos))) |
|
1496 | 1522 | |
|
1497 | 1523 | if len(todos) == 1: |
|
1498 | 1524 | msg = _('Cannot merge, {} TODO still not resolved.').format( |
|
1499 | 1525 | len(todos)) |
|
1500 | 1526 | else: |
|
1501 | 1527 | msg = _('Cannot merge, {} TODOs still not resolved.').format( |
|
1502 | 1528 | len(todos)) |
|
1503 | 1529 | |
|
1504 | 1530 | merge_check.push_error('warning', msg, cls.TODO_CHECK, todos) |
|
1505 | 1531 | |
|
1506 | 1532 | if fail_early: |
|
1507 | 1533 | return merge_check |
|
1508 | 1534 | |
|
1509 | 1535 | # merge possible |
|
1510 | 1536 | merge_status, msg = PullRequestModel().merge_status(pull_request) |
|
1511 | 1537 | merge_check.merge_possible = merge_status |
|
1512 | 1538 | merge_check.merge_msg = msg |
|
1513 | 1539 | if not merge_status: |
|
1514 | 1540 | log.debug( |
|
1515 | 1541 | "MergeCheck: cannot merge, pull request merge not possible.") |
|
1516 | 1542 | merge_check.push_error('warning', msg, cls.MERGE_CHECK, None) |
|
1517 | 1543 | |
|
1518 | 1544 | if fail_early: |
|
1519 | 1545 | return merge_check |
|
1520 | 1546 | |
|
1521 | 1547 | return merge_check |
|
1522 | 1548 | |
|
1523 | 1549 | |
|
1524 | 1550 | ChangeTuple = namedtuple('ChangeTuple', |
|
1525 | 1551 | ['added', 'common', 'removed', 'total']) |
|
1526 | 1552 | |
|
1527 | 1553 | FileChangeTuple = namedtuple('FileChangeTuple', |
|
1528 | 1554 | ['added', 'modified', 'removed']) |
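
The thread running through the model changes above is the replacement of journal entries built from formatted strings (the deleted _log_action, which produced entries such as 'user_closed_pull_request:123') with structured audit events: edit, update_reviewers, delete and close_pull_request now receive the acting user and call _log_audit_action, which hands a namespaced action plus a data payload to audit_logger.store. Below is a minimal sketch of that shape; the in-memory AuditLog class is a hypothetical stand-in, while the real audit_logger persists entries to the database.

import datetime
import json


class AuditLog(object):
    """Hypothetical in-memory stand-in for the audit_logger used above."""

    def __init__(self):
        self.entries = []

    def store(self, action, action_data, user, repo):
        # structured entry: a namespaced action name plus a JSON payload,
        # instead of the old journal string 'user_closed_pull_request:123'
        self.entries.append({
            'action': action,                       # e.g. 'repo.pull_request.close'
            'action_data': json.dumps(action_data),
            'user': getattr(user, 'username', user),
            'repo': getattr(repo, 'repo_name', repo),
            'recorded_on': datetime.datetime.utcnow().isoformat(),
        })


audit_logger = AuditLog()

# usage mirroring _log_audit_action() above: the acting user is threaded
# through explicitly, and the pre-change state travels along as 'old_data'
old_data = {'title': 'old title', 'description': 'old description'}
audit_logger.store(
    action='repo.pull_request.edit',
    action_data={'old_data': old_data},
    user='admin',
    repo='some-group/some-repo')
print(audit_logger.entries[0]['action'])   # repo.pull_request.edit

Capturing the old state in action_data (the get_api_data snapshots taken in edit and delete, and the reviewer dicts saved before deletion) keeps each entry self-contained: the change can still be reconstructed after the underlying row is gone.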
@@ -1,210 +1,213 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pytest |
|
22 | 22 | from rhodecode.model.db import User, UserIpMap |
|
23 | 23 | from rhodecode.model.permission import PermissionModel |
|
24 | 24 | from rhodecode.tests import ( |
|
25 | 25 | TestController, url, clear_all_caches, assert_session_flash) |
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | class TestAdminPermissionsController(TestController): |
|
29 | 29 | |
|
30 | 30 | @pytest.fixture(scope='class', autouse=True) |
|
31 | 31 | def prepare(self, request): |
|
32 | 32 | # cleanup and reset to default permissions after |
|
33 | 33 | @request.addfinalizer |
|
34 | 34 | def cleanup(): |
|
35 | 35 | PermissionModel().create_default_user_permissions( |
|
36 | 36 | User.get_default_user(), force=True) |
|
37 | 37 | |
|
38 | 38 | def test_index_application(self): |
|
39 | 39 | self.log_user() |
|
40 | 40 | self.app.get(url('admin_permissions_application')) |
|
41 | 41 | |
|
42 | 42 | @pytest.mark.parametrize( |
|
43 | 43 | 'anonymous, default_register, default_register_message, default_password_reset,' |
|
44 | 44 | 'default_extern_activate, expect_error, expect_form_error', [ |
|
45 | 45 | (True, 'hg.register.none', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual', |
|
46 | 46 | False, False), |
|
47 | 47 | (True, 'hg.register.manual_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.auto', |
|
48 | 48 | False, False), |
|
49 | 49 | (True, 'hg.register.auto_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual', |
|
50 | 50 | False, False), |
|
51 | 51 | (True, 'hg.register.auto_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual', |
|
52 | 52 | False, False), |
|
53 | 53 | (True, 'hg.register.XXX', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual', |
|
54 | 54 | False, True), |
|
55 | 55 | (True, '', '', 'hg.password_reset.enabled', '', True, False), |
|
56 | 56 | ]) |
|
57 | 57 | def test_update_application_permissions( |
|
58 | 58 | self, anonymous, default_register, default_register_message, default_password_reset, |
|
59 | 59 | default_extern_activate, expect_error, expect_form_error): |
|
60 | 60 | |
|
61 | 61 | self.log_user() |
|
62 | 62 | |
|
63 | 63 | # TODO: anonymous access set here to False, breaks some other tests |
|
64 | 64 | params = { |
|
65 | 65 | 'csrf_token': self.csrf_token, |
|
66 | 66 | 'anonymous': anonymous, |
|
67 | 67 | 'default_register': default_register, |
|
68 | 68 | 'default_register_message': default_register_message, |
|
69 | 69 | 'default_password_reset': default_password_reset, |
|
70 | 70 | 'default_extern_activate': default_extern_activate, |
|
71 | 71 | } |
|
72 | 72 | response = self.app.post(url('admin_permissions_application'), |
|
73 | 73 | params=params) |
|
74 | 74 | if expect_form_error: |
|
75 | 75 | assert response.status_int == 200 |
|
76 | 76 | response.mustcontain('Value must be one of') |
|
77 | 77 | else: |
|
78 | 78 | if expect_error: |
|
79 | 79 | msg = 'Error occurred during update of permissions' |
|
80 | 80 | else: |
|
81 | 81 | msg = 'Application permissions updated successfully' |
|
82 | 82 | assert_session_flash(response, msg) |
|
83 | 83 | |
|
84 | 84 | def test_index_object(self): |
|
85 | 85 | self.log_user() |
|
86 | 86 | self.app.get(url('admin_permissions_object')) |
|
87 | 87 | |
|
88 | 88 | @pytest.mark.parametrize( |
|
89 | 89 | 'repo, repo_group, user_group, expect_error, expect_form_error', [ |
|
90 | 90 | ('repository.none', 'group.none', 'usergroup.none', False, False), |
|
91 | 91 | ('repository.read', 'group.read', 'usergroup.read', False, False), |
|
92 | 92 | ('repository.write', 'group.write', 'usergroup.write', |
|
93 | 93 | False, False), |
|
94 | 94 | ('repository.admin', 'group.admin', 'usergroup.admin', |
|
95 | 95 | False, False), |
|
96 | 96 | ('repository.XXX', 'group.admin', 'usergroup.admin', False, True), |
|
97 | 97 | ('', '', '', True, False), |
|
98 | 98 | ]) |
|
99 | 99 | def test_update_object_permissions(self, repo, repo_group, user_group, |
|
100 | 100 | expect_error, expect_form_error): |
|
101 | 101 | self.log_user() |
|
102 | 102 | |
|
103 | 103 | params = { |
|
104 | 104 | 'csrf_token': self.csrf_token, |
|
105 | 105 | 'default_repo_perm': repo, |
|
106 | 106 | 'overwrite_default_repo': False, |
|
107 | 107 | 'default_group_perm': repo_group, |
|
108 | 108 | 'overwrite_default_group': False, |
|
109 | 109 | 'default_user_group_perm': user_group, |
|
110 | 110 | 'overwrite_default_user_group': False, |
|
111 | 111 | } |
|
112 | 112 | response = self.app.post(url('admin_permissions_object'), |
|
113 | 113 | params=params) |
|
114 | 114 | if expect_form_error: |
|
115 | 115 | assert response.status_int == 200 |
|
116 | 116 | response.mustcontain('Value must be one of') |
|
117 | 117 | else: |
|
118 | 118 | if expect_error: |
|
119 | 119 | msg = 'Error occurred during update of permissions' |
|
120 | 120 | else: |
|
121 | 121 | msg = 'Object permissions updated successfully' |
|
122 | 122 | assert_session_flash(response, msg) |
|
123 | 123 | |
|
124 | 124 | def test_index_global(self): |
|
125 | 125 | self.log_user() |
|
126 | 126 | self.app.get(url('admin_permissions_global')) |
|
127 | 127 | |
|
128 | 128 | @pytest.mark.parametrize( |
|
129 | 129 | 'repo_create, repo_create_write, user_group_create, repo_group_create,' |
|
130 | 130 | 'fork_create, inherit_default_permissions, expect_error,' |
|
131 | 131 | 'expect_form_error', [ |
|
132 | 132 | ('hg.create.none', 'hg.create.write_on_repogroup.false', |
|
133 | 133 | 'hg.usergroup.create.false', 'hg.repogroup.create.false', |
|
134 | 134 | 'hg.fork.none', 'hg.inherit_default_perms.false', False, False), |
|
135 | 135 | ('hg.create.repository', 'hg.create.write_on_repogroup.true', |
|
136 | 136 | 'hg.usergroup.create.true', 'hg.repogroup.create.true', |
|
137 | 137 | 'hg.fork.repository', 'hg.inherit_default_perms.false', |
|
138 | 138 | False, False), |
|
139 | 139 | ('hg.create.XXX', 'hg.create.write_on_repogroup.true', |
|
140 | 140 | 'hg.usergroup.create.true', 'hg.repogroup.create.true', |
|
141 | 141 | 'hg.fork.repository', 'hg.inherit_default_perms.false', |
|
142 | 142 | False, True), |
|
143 | 143 | ('', '', '', '', '', '', True, False), |
|
144 | 144 | ]) |
|
145 | 145 | def test_update_global_permissions( |
|
146 | 146 | self, repo_create, repo_create_write, user_group_create, |
|
147 | 147 | repo_group_create, fork_create, inherit_default_permissions, |
|
148 | 148 | expect_error, expect_form_error): |
|
149 | 149 | self.log_user() |
|
150 | 150 | |
|
151 | 151 | params = { |
|
152 | 152 | 'csrf_token': self.csrf_token, |
|
153 | 153 | 'default_repo_create': repo_create, |
|
154 | 154 | 'default_repo_create_on_write': repo_create_write, |
|
155 | 155 | 'default_user_group_create': user_group_create, |
|
156 | 156 | 'default_repo_group_create': repo_group_create, |
|
157 | 157 | 'default_fork_create': fork_create, |
|
158 | 158 | 'default_inherit_default_permissions': inherit_default_permissions |
|
159 | 159 | } |
|
160 | 160 | response = self.app.post(url('admin_permissions_global'), |
|
161 | 161 | params=params) |
|
162 | 162 | if expect_form_error: |
|
163 | 163 | assert response.status_int == 200 |
|
164 | 164 | response.mustcontain('Value must be one of') |
|
165 | 165 | else: |
|
166 | 166 | if expect_error: |
|
167 | 167 | msg = 'Error occurred during update of permissions' |
|
168 | 168 | else: |
|
169 | 169 | msg = 'Global permissions updated successfully' |
|
170 | 170 | assert_session_flash(response, msg) |
|
171 | 171 | |
|
172 | 172 | def test_index_ips(self): |
|
173 | 173 | self.log_user() |
|
174 | 174 | response = self.app.get(url('admin_permissions_ips')) |
|
175 | 175 | # TODO: Test response... |
|
176 | 176 | response.mustcontain('All IP addresses are allowed') |
|
177 | 177 | |
|
178 | 178 | def test_add_delete_ips(self): |
|
179 | 179 | self.log_user() |
|
180 | 180 | clear_all_caches() |
|
181 | 181 | |
|
182 | 182 | # ADD |
|
183 | 183 | default_user_id = User.get_default_user().user_id |
|
184 | 184 | response = self.app.post( |
|
185 | 185 | url('edit_user_ips', user_id=default_user_id), |
|
186 | 186 | params={'new_ip': '127.0.0.0/24', '_method': 'put', |
|
187 | 187 | 'csrf_token': self.csrf_token}) |
|
188 | 188 | |
|
189 | 189 | response = self.app.get(url('admin_permissions_ips')) |
|
190 | 190 | response.mustcontain('127.0.0.0/24') |
|
191 | 191 | response.mustcontain('127.0.0.0 - 127.0.0.255') |
|
192 | 192 | |
|
193 | 193 | # DELETE |
|
194 | 194 | default_user_id = User.get_default_user().user_id |
|
195 | 195 | del_ip_id = UserIpMap.query().filter(UserIpMap.user_id == |
|
196 | 196 | default_user_id).first().ip_id |
|
197 | 197 | |
|
198 | 198 | response = self.app.post( |
|
199 | 199 | url('edit_user_ips', user_id=default_user_id), |
|
200 | 200 | params={'_method': 'delete', 'del_ip_id': del_ip_id, |
|
201 | 201 | 'csrf_token': self.csrf_token}) |
|
202 | ||
|
203 | assert_session_flash(response, 'Removed ip address from user whitelist') | |
|
204 | ||
|
202 | 205 | clear_all_caches() |
|
203 | 206 | response = self.app.get(url('admin_permissions_ips')) |
|
204 | 207 | response.mustcontain('All IP addresses are allowed') |
|
205 | 208 | response.mustcontain(no=['127.0.0.0/24']) |
|
206 | 209 | response.mustcontain(no=['127.0.0.0 - 127.0.0.255']) |
|
207 | 210 | |
|
208 | 211 | def test_index_overview(self): |
|
209 | 212 | self.log_user() |
|
210 | 213 | self.app.get(url('admin_permissions_overview')) |
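
The permissions tests above all follow one pattern: a pytest.mark.parametrize table mixes valid and invalid form values, and the expect_error/expect_form_error flags select the assertion branch. A condensed, self-contained sketch of the pattern follows; the validator and its messages are hypothetical stand-ins, not the app under test.

import pytest

VALID_PERMS = {'repository.none', 'repository.read',
               'repository.write', 'repository.admin'}


def update_permission(value):
    # hypothetical stand-in for posting the settings form: a bad choice
    # re-renders the form (HTTP 200) with a validation error, a good one
    # redirects and flashes a success message
    if value not in VALID_PERMS:
        return 200, 'Value must be one of %s' % sorted(VALID_PERMS)
    return 302, 'Object permissions updated successfully'


@pytest.mark.parametrize('value, expect_form_error', [
    ('repository.read', False),
    ('repository.admin', False),
    ('repository.XXX', True),
])
def test_update_object_permissions(value, expect_form_error):
    status, message = update_permission(value)
    if expect_form_error:
        assert status == 200
        assert message.startswith('Value must be one of')
    else:
        assert message == 'Object permissions updated successfully'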
|
(Three further modified files in this changeset were truncated and are not shown.)