audit-logs: implemented pull request and comment events.
Author: marcink
Revision: r1807:83e09901 (default branch)

Note: the changeset is larger than shown here; the diff below has been truncated.

@@ -1,113 +1,112 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.model.db import UserLog
24 24 from rhodecode.model.pull_request import PullRequestModel
25 25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
26 26 from rhodecode.api.tests.utils import (
27 27 build_data, api_call, assert_error, assert_ok)
28 28
29 29
30 30 @pytest.mark.usefixtures("testuser_api", "app")
31 31 class TestClosePullRequest(object):
32 32
33 33 @pytest.mark.backends("git", "hg")
34 34 def test_api_close_pull_request(self, pr_util):
35 35 pull_request = pr_util.create_pull_request()
36 36 pull_request_id = pull_request.pull_request_id
37 37 author = pull_request.user_id
38 38 repo = pull_request.target_repo.repo_id
39 39 id_, params = build_data(
40 40 self.apikey, 'close_pull_request',
41 41 repoid=pull_request.target_repo.repo_name,
42 42 pullrequestid=pull_request.pull_request_id)
43 43 response = api_call(self.app, params)
44 44 expected = {
45 45 'pull_request_id': pull_request_id,
46 46 'close_status': 'Rejected',
47 47 'closed': True,
48 48 }
49 49 assert_ok(id_, expected, response.body)
50 action = 'user_closed_pull_request:%d' % pull_request_id
51 50 journal = UserLog.query()\
52 .filter(UserLog.user_id == author)\
51 .filter(UserLog.user_id == author) \
52 .order_by('user_log_id') \
53 53 .filter(UserLog.repository_id == repo)\
54 .filter(UserLog.action == action)\
55 54 .all()
56 assert len(journal) == 1
55 assert journal[-1].action == 'repo.pull_request.close'
57 56
58 57 @pytest.mark.backends("git", "hg")
59 58 def test_api_close_pull_request_already_closed_error(self, pr_util):
60 59 pull_request = pr_util.create_pull_request()
61 60 pull_request_id = pull_request.pull_request_id
62 61 pull_request_repo = pull_request.target_repo.repo_name
63 62 PullRequestModel().close_pull_request(
64 63 pull_request, pull_request.author)
65 64 id_, params = build_data(
66 65 self.apikey, 'close_pull_request',
67 66 repoid=pull_request_repo, pullrequestid=pull_request_id)
68 67 response = api_call(self.app, params)
69 68
70 69 expected = 'pull request `%s` is already closed' % pull_request_id
71 70 assert_error(id_, expected, given=response.body)
72 71
73 72 @pytest.mark.backends("git", "hg")
74 73 def test_api_close_pull_request_repo_error(self):
75 74 id_, params = build_data(
76 75 self.apikey, 'close_pull_request',
77 76 repoid=666, pullrequestid=1)
78 77 response = api_call(self.app, params)
79 78
80 79 expected = 'repository `666` does not exist'
81 80 assert_error(id_, expected, given=response.body)
82 81
83 82 @pytest.mark.backends("git", "hg")
84 83 def test_api_close_pull_request_non_admin_with_userid_error(self,
85 84 pr_util):
86 85 pull_request = pr_util.create_pull_request()
87 86 id_, params = build_data(
88 87 self.apikey_regular, 'close_pull_request',
89 88 repoid=pull_request.target_repo.repo_name,
90 89 pullrequestid=pull_request.pull_request_id,
91 90 userid=TEST_USER_ADMIN_LOGIN)
92 91 response = api_call(self.app, params)
93 92
94 93 expected = 'userid is not the same as your user'
95 94 assert_error(id_, expected, given=response.body)
96 95
97 96 @pytest.mark.backends("git", "hg")
98 97 def test_api_close_pull_request_no_perms_to_close(
99 98 self, user_util, pr_util):
100 99 user = user_util.create_user()
101 100 pull_request = pr_util.create_pull_request()
102 101
103 102 id_, params = build_data(
104 103 user.api_key, 'close_pull_request',
105 104 repoid=pull_request.target_repo.repo_name,
106 105 pullrequestid=pull_request.pull_request_id,)
107 106 response = api_call(self.app, params)
108 107
109 108 expected = ('pull request `%s` close failed, '
110 109 'no permission to close.') % pull_request.pull_request_id
111 110
112 111 response_json = response.json['error']
113 112 assert response_json == expected
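
The change above drops the legacy journal filter that matched a formatted action string ('user_closed_pull_request:<id>') and instead orders the UserLog journal and checks the newest entry for the new audit-log action name. A minimal sketch of that pattern, using only names that appear in this diff; the helper itself is illustrative, not part of the codebase:

    from rhodecode.model.db import UserLog

    def latest_journal_action(user_id, repository_id):
        # newest entry comes last thanks to the explicit ordering
        journal = UserLog.query()\
            .filter(UserLog.user_id == user_id)\
            .filter(UserLog.repository_id == repository_id)\
            .order_by('user_log_id')\
            .all()
        return journal[-1].action if journal else None

    # after the close_pull_request API call in the test above:
    # assert latest_journal_action(author, repo) == 'repo.pull_request.close'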
@@ -1,209 +1,208 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.model.comment import CommentsModel
24 24 from rhodecode.model.db import UserLog
25 25 from rhodecode.model.pull_request import PullRequestModel
26 26 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
27 27 from rhodecode.api.tests.utils import (
28 28 build_data, api_call, assert_error, assert_ok)
29 29
30 30
31 31 @pytest.mark.usefixtures("testuser_api", "app")
32 32 class TestCommentPullRequest(object):
33 33 finalizers = []
34 34
35 35 def teardown_method(self, method):
36 36 if self.finalizers:
37 37 for finalizer in self.finalizers:
38 38 finalizer()
39 39 self.finalizers = []
40 40
41 41 @pytest.mark.backends("git", "hg")
42 42 def test_api_comment_pull_request(self, pr_util, no_notifications):
43 43 pull_request = pr_util.create_pull_request()
44 44 pull_request_id = pull_request.pull_request_id
45 45 author = pull_request.user_id
46 46 repo = pull_request.target_repo.repo_id
47 47 id_, params = build_data(
48 48 self.apikey, 'comment_pull_request',
49 49 repoid=pull_request.target_repo.repo_name,
50 50 pullrequestid=pull_request.pull_request_id,
51 51 message='test message')
52 52 response = api_call(self.app, params)
53 53 pull_request = PullRequestModel().get(pull_request.pull_request_id)
54 54
55 55 comments = CommentsModel().get_comments(
56 56 pull_request.target_repo.repo_id, pull_request=pull_request)
57 57
58 58 expected = {
59 59 'pull_request_id': pull_request.pull_request_id,
60 60 'comment_id': comments[-1].comment_id,
61 61 'status': {'given': None, 'was_changed': None}
62 62 }
63 63 assert_ok(id_, expected, response.body)
64 64
65 action = 'user_commented_pull_request:%d' % pull_request_id
66 65 journal = UserLog.query()\
67 66 .filter(UserLog.user_id == author)\
68 .filter(UserLog.repository_id == repo)\
69 .filter(UserLog.action == action)\
67 .filter(UserLog.repository_id == repo) \
68 .order_by('user_log_id') \
70 69 .all()
71 assert len(journal) == 2
70 assert journal[-1].action == 'repo.pull_request.comment.create'
72 71
73 72 @pytest.mark.backends("git", "hg")
74 73 def test_api_comment_pull_request_change_status(
75 74 self, pr_util, no_notifications):
76 75 pull_request = pr_util.create_pull_request()
77 76 pull_request_id = pull_request.pull_request_id
78 77 id_, params = build_data(
79 78 self.apikey, 'comment_pull_request',
80 79 repoid=pull_request.target_repo.repo_name,
81 80 pullrequestid=pull_request.pull_request_id,
82 81 status='rejected')
83 82 response = api_call(self.app, params)
84 83 pull_request = PullRequestModel().get(pull_request_id)
85 84
86 85 comments = CommentsModel().get_comments(
87 86 pull_request.target_repo.repo_id, pull_request=pull_request)
88 87 expected = {
89 88 'pull_request_id': pull_request.pull_request_id,
90 89 'comment_id': comments[-1].comment_id,
91 90 'status': {'given': 'rejected', 'was_changed': True}
92 91 }
93 92 assert_ok(id_, expected, response.body)
94 93
95 94 @pytest.mark.backends("git", "hg")
96 95 def test_api_comment_pull_request_change_status_with_specific_commit_id(
97 96 self, pr_util, no_notifications):
98 97 pull_request = pr_util.create_pull_request()
99 98 pull_request_id = pull_request.pull_request_id
100 99 latest_commit_id = 'test_commit'
101 100 # inject additional revision, to fail test the status change on
102 101 # non-latest commit
103 102 pull_request.revisions = pull_request.revisions + ['test_commit']
104 103
105 104 id_, params = build_data(
106 105 self.apikey, 'comment_pull_request',
107 106 repoid=pull_request.target_repo.repo_name,
108 107 pullrequestid=pull_request.pull_request_id,
109 108 status='approved', commit_id=latest_commit_id)
110 109 response = api_call(self.app, params)
111 110 pull_request = PullRequestModel().get(pull_request_id)
112 111
113 112 expected = {
114 113 'pull_request_id': pull_request.pull_request_id,
115 114 'comment_id': None,
116 115 'status': {'given': 'approved', 'was_changed': False}
117 116 }
118 117 assert_ok(id_, expected, response.body)
119 118
120 119 @pytest.mark.backends("git", "hg")
121 120 def test_api_comment_pull_request_change_status_with_specific_commit_id(
122 121 self, pr_util, no_notifications):
123 122 pull_request = pr_util.create_pull_request()
124 123 pull_request_id = pull_request.pull_request_id
125 124 latest_commit_id = pull_request.revisions[0]
126 125
127 126 id_, params = build_data(
128 127 self.apikey, 'comment_pull_request',
129 128 repoid=pull_request.target_repo.repo_name,
130 129 pullrequestid=pull_request.pull_request_id,
131 130 status='approved', commit_id=latest_commit_id)
132 131 response = api_call(self.app, params)
133 132 pull_request = PullRequestModel().get(pull_request_id)
134 133
135 134 comments = CommentsModel().get_comments(
136 135 pull_request.target_repo.repo_id, pull_request=pull_request)
137 136 expected = {
138 137 'pull_request_id': pull_request.pull_request_id,
139 138 'comment_id': comments[-1].comment_id,
140 139 'status': {'given': 'approved', 'was_changed': True}
141 140 }
142 141 assert_ok(id_, expected, response.body)
143 142
144 143 @pytest.mark.backends("git", "hg")
145 144 def test_api_comment_pull_request_missing_params_error(self, pr_util):
146 145 pull_request = pr_util.create_pull_request()
147 146 pull_request_id = pull_request.pull_request_id
148 147 pull_request_repo = pull_request.target_repo.repo_name
149 148 id_, params = build_data(
150 149 self.apikey, 'comment_pull_request',
151 150 repoid=pull_request_repo,
152 151 pullrequestid=pull_request_id)
153 152 response = api_call(self.app, params)
154 153
155 154 expected = 'Both message and status parameters are missing. At least one is required.'
156 155 assert_error(id_, expected, given=response.body)
157 156
158 157 @pytest.mark.backends("git", "hg")
159 158 def test_api_comment_pull_request_unknown_status_error(self, pr_util):
160 159 pull_request = pr_util.create_pull_request()
161 160 pull_request_id = pull_request.pull_request_id
162 161 pull_request_repo = pull_request.target_repo.repo_name
163 162 id_, params = build_data(
164 163 self.apikey, 'comment_pull_request',
165 164 repoid=pull_request_repo,
166 165 pullrequestid=pull_request_id,
167 166 status='42')
168 167 response = api_call(self.app, params)
169 168
170 169 expected = 'Unknown comment status: `42`'
171 170 assert_error(id_, expected, given=response.body)
172 171
173 172 @pytest.mark.backends("git", "hg")
174 173 def test_api_comment_pull_request_repo_error(self):
175 174 id_, params = build_data(
176 175 self.apikey, 'comment_pull_request',
177 176 repoid=666, pullrequestid=1)
178 177 response = api_call(self.app, params)
179 178
180 179 expected = 'repository `666` does not exist'
181 180 assert_error(id_, expected, given=response.body)
182 181
183 182 @pytest.mark.backends("git", "hg")
184 183 def test_api_comment_pull_request_non_admin_with_userid_error(
185 184 self, pr_util):
186 185 pull_request = pr_util.create_pull_request()
187 186 id_, params = build_data(
188 187 self.apikey_regular, 'comment_pull_request',
189 188 repoid=pull_request.target_repo.repo_name,
190 189 pullrequestid=pull_request.pull_request_id,
191 190 userid=TEST_USER_ADMIN_LOGIN)
192 191 response = api_call(self.app, params)
193 192
194 193 expected = 'userid is not the same as your user'
195 194 assert_error(id_, expected, given=response.body)
196 195
197 196 @pytest.mark.backends("git", "hg")
198 197 def test_api_comment_pull_request_wrong_commit_id_error(self, pr_util):
199 198 pull_request = pr_util.create_pull_request()
200 199 id_, params = build_data(
201 200 self.apikey_regular, 'comment_pull_request',
202 201 repoid=pull_request.target_repo.repo_name,
203 202 status='approved',
204 203 pullrequestid=pull_request.pull_request_id,
205 204 commit_id='XXX')
206 205 response = api_call(self.app, params)
207 206
208 207 expected = 'Invalid commit_id `XXX` for this pull request.'
209 208 assert_error(id_, expected, given=response.body)
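
For reference, the comment tests above all go through the same JSON-RPC helpers. A hedged sketch of that call pattern, where the app, apikey, repository name and pull request id are placeholders supplied by the test fixtures and the wrapper function is illustrative only:

    from rhodecode.api.tests.utils import build_data, api_call

    def comment_and_approve(app, apikey, repo_name, pull_request_id):
        # build a 'comment_pull_request' payload that both leaves a comment
        # and changes the review status, mirroring the tests above
        id_, params = build_data(
            apikey, 'comment_pull_request',
            repoid=repo_name,
            pullrequestid=pull_request_id,
            message='test message',
            status='approved')
        return id_, api_call(app, params)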
@@ -1,134 +1,134 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import pytest
23 23 import urlobject
24 24 from pylons import url
25 25
26 26 from rhodecode.api.tests.utils import (
27 27 build_data, api_call, assert_error, assert_ok)
28 28 from rhodecode.lib.utils2 import safe_unicode
29 29
30 30 pytestmark = pytest.mark.backends("git", "hg")
31 31
32 32
33 33 @pytest.mark.usefixtures("testuser_api", "app")
34 34 class TestGetPullRequest(object):
35 35
36 def test_api_get_pull_request(self, pr_util, http_host_stub, http_host_only_stub):
36 def test_api_get_pull_request(self, pr_util, http_host_only_stub):
37 37 from rhodecode.model.pull_request import PullRequestModel
38 38 pull_request = pr_util.create_pull_request(mergeable=True)
39 39 id_, params = build_data(
40 40 self.apikey, 'get_pull_request',
41 41 repoid=pull_request.target_repo.repo_name,
42 42 pullrequestid=pull_request.pull_request_id)
43 43
44 44 response = api_call(self.app, params)
45 45
46 46 assert response.status == '200 OK'
47 47
48 48 url_obj = urlobject.URLObject(
49 49 url(
50 50 'pullrequest_show',
51 51 repo_name=pull_request.target_repo.repo_name,
52 52 pull_request_id=pull_request.pull_request_id, qualified=True))
53 53
54 54 pr_url = safe_unicode(
55 url_obj.with_netloc(http_host_stub))
55 url_obj.with_netloc(http_host_only_stub))
56 56 source_url = safe_unicode(
57 57 pull_request.source_repo.clone_url().with_netloc(http_host_only_stub))
58 58 target_url = safe_unicode(
59 59 pull_request.target_repo.clone_url().with_netloc(http_host_only_stub))
60 60 shadow_url = safe_unicode(
61 61 PullRequestModel().get_shadow_clone_url(pull_request))
62 62
63 63 expected = {
64 64 'pull_request_id': pull_request.pull_request_id,
65 65 'url': pr_url,
66 66 'title': pull_request.title,
67 67 'description': pull_request.description,
68 68 'status': pull_request.status,
69 69 'created_on': pull_request.created_on,
70 70 'updated_on': pull_request.updated_on,
71 71 'commit_ids': pull_request.revisions,
72 72 'review_status': pull_request.calculated_review_status(),
73 73 'mergeable': {
74 74 'status': True,
75 75 'message': 'This pull request can be automatically merged.',
76 76 },
77 77 'source': {
78 78 'clone_url': source_url,
79 79 'repository': pull_request.source_repo.repo_name,
80 80 'reference': {
81 81 'name': pull_request.source_ref_parts.name,
82 82 'type': pull_request.source_ref_parts.type,
83 83 'commit_id': pull_request.source_ref_parts.commit_id,
84 84 },
85 85 },
86 86 'target': {
87 87 'clone_url': target_url,
88 88 'repository': pull_request.target_repo.repo_name,
89 89 'reference': {
90 90 'name': pull_request.target_ref_parts.name,
91 91 'type': pull_request.target_ref_parts.type,
92 92 'commit_id': pull_request.target_ref_parts.commit_id,
93 93 },
94 94 },
95 95 'merge': {
96 96 'clone_url': shadow_url,
97 97 'reference': {
98 98 'name': pull_request.shadow_merge_ref.name,
99 99 'type': pull_request.shadow_merge_ref.type,
100 100 'commit_id': pull_request.shadow_merge_ref.commit_id,
101 101 },
102 102 },
103 103 'author': pull_request.author.get_api_data(include_secrets=False,
104 104 details='basic'),
105 105 'reviewers': [
106 106 {
107 107 'user': reviewer.get_api_data(include_secrets=False,
108 108 details='basic'),
109 109 'reasons': reasons,
110 110 'review_status': st[0][1].status if st else 'not_reviewed',
111 111 }
112 112 for reviewer, reasons, mandatory, st in
113 113 pull_request.reviewers_statuses()
114 114 ]
115 115 }
116 116 assert_ok(id_, expected, response.body)
117 117
118 118 def test_api_get_pull_request_repo_error(self):
119 119 id_, params = build_data(
120 120 self.apikey, 'get_pull_request',
121 121 repoid=666, pullrequestid=1)
122 122 response = api_call(self.app, params)
123 123
124 124 expected = 'repository `666` does not exist'
125 125 assert_error(id_, expected, given=response.body)
126 126
127 127 def test_api_get_pull_request_pull_request_error(self):
128 128 id_, params = build_data(
129 129 self.apikey, 'get_pull_request',
130 130 repoid=1, pullrequestid=666)
131 131 response = api_call(self.app, params)
132 132
133 133 expected = 'pull request `666` does not exist'
134 134 assert_error(id_, expected, given=response.body)
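
The only functional change in the test above is the switch from http_host_stub to http_host_only_stub when building the expected pull request URL. A small sketch of that normalisation step, assuming the urlobject and safe_unicode imports already present in the test; the helper name is illustrative:

    import urlobject
    from rhodecode.lib.utils2 import safe_unicode

    def rehost(route_url, host_only_stub):
        # swap the netloc of a generated route URL for the test host stub so
        # it compares equal to the URL returned by the API
        return safe_unicode(
            urlobject.URLObject(route_url).with_netloc(host_only_stub))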
@@ -1,136 +1,136 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.model.db import UserLog, PullRequest
24 24 from rhodecode.model.meta import Session
25 25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
26 26 from rhodecode.api.tests.utils import (
27 27 build_data, api_call, assert_error, assert_ok)
28 28
29 29
30 30 @pytest.mark.usefixtures("testuser_api", "app")
31 31 class TestMergePullRequest(object):
32 32 @pytest.mark.backends("git", "hg")
33 33 def test_api_merge_pull_request_merge_failed(self, pr_util, no_notifications):
34 34 pull_request = pr_util.create_pull_request(mergeable=True)
35 35 author = pull_request.user_id
36 36 repo = pull_request.target_repo.repo_id
37 37 pull_request_id = pull_request.pull_request_id
38 38 pull_request_repo = pull_request.target_repo.repo_name
39 39
40 40 id_, params = build_data(
41 41 self.apikey, 'merge_pull_request',
42 42 repoid=pull_request_repo,
43 43 pullrequestid=pull_request_id)
44 44
45 45 response = api_call(self.app, params)
46 46
47 47 # The above api call detaches the pull request DB object from the
48 48 # session because of an unconditional transaction rollback in our
49 49 # middleware. Therefore we need to add it back here if we want to use
50 50 # it.
51 51 Session().add(pull_request)
52 52
53 53 expected = 'merge not possible for following reasons: ' \
54 54 'Pull request reviewer approval is pending.'
55 55 assert_error(id_, expected, given=response.body)
56 56
57 57 @pytest.mark.backends("git", "hg")
58 58 def test_api_merge_pull_request(self, pr_util, no_notifications):
59 59 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
60 60 author = pull_request.user_id
61 61 repo = pull_request.target_repo.repo_id
62 62 pull_request_id = pull_request.pull_request_id
63 63 pull_request_repo = pull_request.target_repo.repo_name
64 64
65 65 id_, params = build_data(
66 66 self.apikey, 'comment_pull_request',
67 67 repoid=pull_request_repo,
68 68 pullrequestid=pull_request_id,
69 69 status='approved')
70 70
71 71 response = api_call(self.app, params)
72 72 expected = {
73 73 'comment_id': response.json.get('result', {}).get('comment_id'),
74 74 'pull_request_id': pull_request_id,
75 75 'status': {'given': 'approved', 'was_changed': True}
76 76 }
77 77 assert_ok(id_, expected, given=response.body)
78 78
79 79 id_, params = build_data(
80 80 self.apikey, 'merge_pull_request',
81 81 repoid=pull_request_repo,
82 82 pullrequestid=pull_request_id)
83 83
84 84 response = api_call(self.app, params)
85 85
86 86 pull_request = PullRequest.get(pull_request_id)
87 87
88 88 expected = {
89 89 'executed': True,
90 90 'failure_reason': 0,
91 91 'possible': True,
92 92 'merge_commit_id': pull_request.shadow_merge_ref.commit_id,
93 93 'merge_ref': pull_request.shadow_merge_ref._asdict()
94 94 }
95 95
96 96 assert_ok(id_, expected, response.body)
97 97
98 action = 'user_merged_pull_request:%d' % (pull_request_id, )
99 98 journal = UserLog.query()\
100 99 .filter(UserLog.user_id == author)\
101 .filter(UserLog.repository_id == repo)\
102 .filter(UserLog.action == action)\
100 .filter(UserLog.repository_id == repo) \
101 .order_by('user_log_id') \
103 102 .all()
104 assert len(journal) == 1
103 assert journal[-2].action == 'repo.pull_request.merge'
104 assert journal[-1].action == 'repo.pull_request.close'
105 105
106 106 id_, params = build_data(
107 107 self.apikey, 'merge_pull_request',
108 108 repoid=pull_request_repo, pullrequestid=pull_request_id)
109 109 response = api_call(self.app, params)
110 110
111 111 expected = 'merge not possible for following reasons: This pull request is closed.'
112 112 assert_error(id_, expected, given=response.body)
113 113
114 114 @pytest.mark.backends("git", "hg")
115 115 def test_api_merge_pull_request_repo_error(self):
116 116 id_, params = build_data(
117 117 self.apikey, 'merge_pull_request',
118 118 repoid=666, pullrequestid=1)
119 119 response = api_call(self.app, params)
120 120
121 121 expected = 'repository `666` does not exist'
122 122 assert_error(id_, expected, given=response.body)
123 123
124 124 @pytest.mark.backends("git", "hg")
125 125 def test_api_merge_pull_request_non_admin_with_userid_error(self,
126 126 pr_util):
127 127 pull_request = pr_util.create_pull_request(mergeable=True)
128 128 id_, params = build_data(
129 129 self.apikey_regular, 'merge_pull_request',
130 130 repoid=pull_request.target_repo.repo_name,
131 131 pullrequestid=pull_request.pull_request_id,
132 132 userid=TEST_USER_ADMIN_LOGIN)
133 133 response = api_call(self.app, params)
134 134
135 135 expected = 'userid is not the same as your user'
136 136 assert_error(id_, expected, given=response.body)
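
The merge test now checks the ordered journal instead of counting rows for a formatted action string: a successful API merge writes a 'repo.pull_request.merge' entry, and closing the merged pull request then writes 'repo.pull_request.close', so the last two entries are asserted. Condensed, with the same variables as in the test:

    journal = UserLog.query()\
        .filter(UserLog.user_id == author)\
        .filter(UserLog.repository_id == repo)\
        .order_by('user_log_id')\
        .all()
    # the merge entry is written first, the implicit close entry last
    assert journal[-2].action == 'repo.pull_request.merge'
    assert journal[-1].action == 'repo.pull_request.close'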
@@ -1,213 +1,212 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.lib.vcs.nodes import FileNode
24 24 from rhodecode.model.db import User
25 25 from rhodecode.model.pull_request import PullRequestModel
26 26 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
27 27 from rhodecode.api.tests.utils import (
28 28 build_data, api_call, assert_ok, assert_error)
29 29
30 30
31 31 @pytest.mark.usefixtures("testuser_api", "app")
32 32 class TestUpdatePullRequest(object):
33 33
34 34 @pytest.mark.backends("git", "hg")
35 35 def test_api_update_pull_request_title_or_description(
36 self, pr_util, silence_action_logger, no_notifications):
36 self, pr_util, no_notifications):
37 37 pull_request = pr_util.create_pull_request()
38 38
39 39 id_, params = build_data(
40 40 self.apikey, 'update_pull_request',
41 41 repoid=pull_request.target_repo.repo_name,
42 42 pullrequestid=pull_request.pull_request_id,
43 43 title='New TITLE OF A PR',
44 44 description='New DESC OF A PR',
45 45 )
46 46 response = api_call(self.app, params)
47 47
48 48 expected = {
49 49 "msg": "Updated pull request `{}`".format(
50 50 pull_request.pull_request_id),
51 51 "pull_request": response.json['result']['pull_request'],
52 52 "updated_commits": {"added": [], "common": [], "removed": []},
53 53 "updated_reviewers": {"added": [], "removed": []},
54 54 }
55 55
56 56 response_json = response.json['result']
57 57 assert response_json == expected
58 58 pr = response_json['pull_request']
59 59 assert pr['title'] == 'New TITLE OF A PR'
60 60 assert pr['description'] == 'New DESC OF A PR'
61 61
62 62 @pytest.mark.backends("git", "hg")
63 63 def test_api_try_update_closed_pull_request(
64 self, pr_util, silence_action_logger, no_notifications):
64 self, pr_util, no_notifications):
65 65 pull_request = pr_util.create_pull_request()
66 66 PullRequestModel().close_pull_request(
67 67 pull_request, TEST_USER_ADMIN_LOGIN)
68 68
69 69 id_, params = build_data(
70 70 self.apikey, 'update_pull_request',
71 71 repoid=pull_request.target_repo.repo_name,
72 72 pullrequestid=pull_request.pull_request_id)
73 73 response = api_call(self.app, params)
74 74
75 75 expected = 'pull request `{}` update failed, pull request ' \
76 76 'is closed'.format(pull_request.pull_request_id)
77 77
78 78 assert_error(id_, expected, response.body)
79 79
80 80 @pytest.mark.backends("git", "hg")
81 def test_api_update_update_commits(
82 self, pr_util, silence_action_logger, no_notifications):
81 def test_api_update_update_commits(self, pr_util, no_notifications):
83 82 commits = [
84 83 {'message': 'a'},
85 84 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
86 85 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
87 86 ]
88 87 pull_request = pr_util.create_pull_request(
89 88 commits=commits, target_head='a', source_head='b', revisions=['b'])
90 89 pr_util.update_source_repository(head='c')
91 90 repo = pull_request.source_repo.scm_instance()
92 91 commits = [x for x in repo.get_commits()]
93 92 print commits
94 93
95 94 added_commit_id = commits[-1].raw_id # c commit
96 95 common_commit_id = commits[1].raw_id # b commit is common ancestor
97 96 total_commits = [added_commit_id, common_commit_id]
98 97
99 98 id_, params = build_data(
100 99 self.apikey, 'update_pull_request',
101 100 repoid=pull_request.target_repo.repo_name,
102 101 pullrequestid=pull_request.pull_request_id,
103 102 update_commits=True
104 103 )
105 104 response = api_call(self.app, params)
106 105
107 106 expected = {
108 107 "msg": "Updated pull request `{}`".format(
109 108 pull_request.pull_request_id),
110 109 "pull_request": response.json['result']['pull_request'],
111 110 "updated_commits": {"added": [added_commit_id],
112 111 "common": [common_commit_id],
113 112 "total": total_commits,
114 113 "removed": []},
115 114 "updated_reviewers": {"added": [], "removed": []},
116 115 }
117 116
118 117 assert_ok(id_, expected, response.body)
119 118
120 119 @pytest.mark.backends("git", "hg")
121 120 def test_api_update_change_reviewers(
122 self, user_util, pr_util, silence_action_logger, no_notifications):
121 self, user_util, pr_util, no_notifications):
123 122 a = user_util.create_user()
124 123 b = user_util.create_user()
125 124 c = user_util.create_user()
126 125 new_reviewers = [
127 126 {'username': b.username,'reasons': ['updated via API'],
128 127 'mandatory':False},
129 128 {'username': c.username, 'reasons': ['updated via API'],
130 129 'mandatory':False},
131 130 ]
132 131
133 132 added = [b.username, c.username]
134 133 removed = [a.username]
135 134
136 135 pull_request = pr_util.create_pull_request(
137 136 reviewers=[(a.username, ['added via API'], False)])
138 137
139 138 id_, params = build_data(
140 139 self.apikey, 'update_pull_request',
141 140 repoid=pull_request.target_repo.repo_name,
142 141 pullrequestid=pull_request.pull_request_id,
143 142 reviewers=new_reviewers)
144 143 response = api_call(self.app, params)
145 144 expected = {
146 145 "msg": "Updated pull request `{}`".format(
147 146 pull_request.pull_request_id),
148 147 "pull_request": response.json['result']['pull_request'],
149 148 "updated_commits": {"added": [], "common": [], "removed": []},
150 149 "updated_reviewers": {"added": added, "removed": removed},
151 150 }
152 151
153 152 assert_ok(id_, expected, response.body)
154 153
155 154 @pytest.mark.backends("git", "hg")
156 155 def test_api_update_bad_user_in_reviewers(self, pr_util):
157 156 pull_request = pr_util.create_pull_request()
158 157
159 158 id_, params = build_data(
160 159 self.apikey, 'update_pull_request',
161 160 repoid=pull_request.target_repo.repo_name,
162 161 pullrequestid=pull_request.pull_request_id,
163 162 reviewers=[{'username': 'bad_name'}])
164 163 response = api_call(self.app, params)
165 164
166 165 expected = 'user `bad_name` does not exist'
167 166
168 167 assert_error(id_, expected, response.body)
169 168
170 169 @pytest.mark.backends("git", "hg")
171 170 def test_api_update_repo_error(self, pr_util):
172 171 id_, params = build_data(
173 172 self.apikey, 'update_pull_request',
174 173 repoid='fake',
175 174 pullrequestid='fake',
176 175 reviewers=[{'username': 'bad_name'}])
177 176 response = api_call(self.app, params)
178 177
179 178 expected = 'repository `fake` does not exist'
180 179
181 180 response_json = response.json['error']
182 181 assert response_json == expected
183 182
184 183 @pytest.mark.backends("git", "hg")
185 184 def test_api_update_pull_request_error(self, pr_util):
186 185 pull_request = pr_util.create_pull_request()
187 186
188 187 id_, params = build_data(
189 188 self.apikey, 'update_pull_request',
190 189 repoid=pull_request.target_repo.repo_name,
191 190 pullrequestid=999999,
192 191 reviewers=[{'username': 'bad_name'}])
193 192 response = api_call(self.app, params)
194 193
195 194 expected = 'pull request `999999` does not exist'
196 195 assert_error(id_, expected, response.body)
197 196
198 197 @pytest.mark.backends("git", "hg")
199 198 def test_api_update_pull_request_no_perms_to_update(
200 199 self, user_util, pr_util):
201 200 user = user_util.create_user()
202 201 pull_request = pr_util.create_pull_request()
203 202
204 203 id_, params = build_data(
205 204 user.api_key, 'update_pull_request',
206 205 repoid=pull_request.target_repo.repo_name,
207 206 pullrequestid=pull_request.pull_request_id,)
208 207 response = api_call(self.app, params)
209 208
210 209 expected = ('pull request `%s` update failed, '
211 210 'no permission to update.') % pull_request.pull_request_id
212 211
213 212 assert_error(id_, expected, response.body)
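
The API module changes that follow thread the acting user into the model layer so the journal entries asserted above can be attributed to a user. The two updated call sites, excerpted as they appear later in this diff:

    # title/description edits now carry the acting user
    PullRequestModel().edit(
        pull_request, title or pull_request.title,
        description or pull_request.description, apiuser)

    # reviewer changes likewise receive the acting user
    added_reviewers, removed_reviewers = \
        PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)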
@@ -1,779 +1,779 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23
24 24 from rhodecode import events
25 25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
26 26 from rhodecode.api.utils import (
27 27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
28 28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
29 29 validate_repo_permissions, resolve_ref_or_error)
30 30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
31 31 from rhodecode.lib.base import vcs_operation_context
32 32 from rhodecode.lib.utils2 import str2bool
33 33 from rhodecode.model.changeset_status import ChangesetStatusModel
34 34 from rhodecode.model.comment import CommentsModel
35 35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment
36 36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
37 37 from rhodecode.model.settings import SettingsModel
38 38 from rhodecode.model.validation_schema import Invalid
39 39 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
40 40 ReviewerListSchema)
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
45 45 @jsonrpc_method()
46 46 def get_pull_request(request, apiuser, repoid, pullrequestid):
47 47 """
48 48 Get a pull request based on the given ID.
49 49
50 50 :param apiuser: This is filled automatically from the |authtoken|.
51 51 :type apiuser: AuthUser
52 52 :param repoid: Repository name or repository ID from where the pull
53 53 request was opened.
54 54 :type repoid: str or int
55 55 :param pullrequestid: ID of the requested pull request.
56 56 :type pullrequestid: int
57 57
58 58 Example output:
59 59
60 60 .. code-block:: bash
61 61
62 62 "id": <id_given_in_input>,
63 63 "result":
64 64 {
65 65 "pull_request_id": "<pull_request_id>",
66 66 "url": "<url>",
67 67 "title": "<title>",
68 68 "description": "<description>",
69 69 "status" : "<status>",
70 70 "created_on": "<date_time_created>",
71 71 "updated_on": "<date_time_updated>",
72 72 "commit_ids": [
73 73 ...
74 74 "<commit_id>",
75 75 "<commit_id>",
76 76 ...
77 77 ],
78 78 "review_status": "<review_status>",
79 79 "mergeable": {
80 80 "status": "<bool>",
81 81 "message": "<message>",
82 82 },
83 83 "source": {
84 84 "clone_url": "<clone_url>",
85 85 "repository": "<repository_name>",
86 86 "reference":
87 87 {
88 88 "name": "<name>",
89 89 "type": "<type>",
90 90 "commit_id": "<commit_id>",
91 91 }
92 92 },
93 93 "target": {
94 94 "clone_url": "<clone_url>",
95 95 "repository": "<repository_name>",
96 96 "reference":
97 97 {
98 98 "name": "<name>",
99 99 "type": "<type>",
100 100 "commit_id": "<commit_id>",
101 101 }
102 102 },
103 103 "merge": {
104 104 "clone_url": "<clone_url>",
105 105 "reference":
106 106 {
107 107 "name": "<name>",
108 108 "type": "<type>",
109 109 "commit_id": "<commit_id>",
110 110 }
111 111 },
112 112 "author": <user_obj>,
113 113 "reviewers": [
114 114 ...
115 115 {
116 116 "user": "<user_obj>",
117 117 "review_status": "<review_status>",
118 118 }
119 119 ...
120 120 ]
121 121 },
122 122 "error": null
123 123 """
124 124 get_repo_or_error(repoid)
125 125 pull_request = get_pull_request_or_error(pullrequestid)
126 126 if not PullRequestModel().check_user_read(
127 127 pull_request, apiuser, api=True):
128 128 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
129 129 data = pull_request.get_api_data()
130 130 return data
131 131
132 132
133 133 @jsonrpc_method()
134 134 def get_pull_requests(request, apiuser, repoid, status=Optional('new')):
135 135 """
136 136 Get all pull requests from the repository specified in `repoid`.
137 137
138 138 :param apiuser: This is filled automatically from the |authtoken|.
139 139 :type apiuser: AuthUser
140 140 :param repoid: Repository name or repository ID.
141 141 :type repoid: str or int
142 142 :param status: Only return pull requests with the specified status.
143 143 Valid options are.
144 144 * ``new`` (default)
145 145 * ``open``
146 146 * ``closed``
147 147 :type status: str
148 148
149 149 Example output:
150 150
151 151 .. code-block:: bash
152 152
153 153 "id": <id_given_in_input>,
154 154 "result":
155 155 [
156 156 ...
157 157 {
158 158 "pull_request_id": "<pull_request_id>",
159 159 "url": "<url>",
160 160 "title" : "<title>",
161 161 "description": "<description>",
162 162 "status": "<status>",
163 163 "created_on": "<date_time_created>",
164 164 "updated_on": "<date_time_updated>",
165 165 "commit_ids": [
166 166 ...
167 167 "<commit_id>",
168 168 "<commit_id>",
169 169 ...
170 170 ],
171 171 "review_status": "<review_status>",
172 172 "mergeable": {
173 173 "status": "<bool>",
 174 174                     "message": "<message>",
175 175 },
176 176 "source": {
177 177 "clone_url": "<clone_url>",
178 178 "reference":
179 179 {
180 180 "name": "<name>",
181 181 "type": "<type>",
182 182 "commit_id": "<commit_id>",
183 183 }
184 184 },
185 185 "target": {
186 186 "clone_url": "<clone_url>",
187 187 "reference":
188 188 {
189 189 "name": "<name>",
190 190 "type": "<type>",
191 191 "commit_id": "<commit_id>",
192 192 }
193 193 },
194 194 "merge": {
195 195 "clone_url": "<clone_url>",
196 196 "reference":
197 197 {
198 198 "name": "<name>",
199 199 "type": "<type>",
200 200 "commit_id": "<commit_id>",
201 201 }
202 202 },
203 203 "author": <user_obj>,
204 204 "reviewers": [
205 205 ...
206 206 {
207 207 "user": "<user_obj>",
208 208 "review_status": "<review_status>",
209 209 }
210 210 ...
211 211 ]
212 212 }
213 213 ...
214 214 ],
215 215 "error": null
216 216
217 217 """
218 218 repo = get_repo_or_error(repoid)
219 219 if not has_superadmin_permission(apiuser):
220 220 _perms = (
221 221 'repository.admin', 'repository.write', 'repository.read',)
222 222 validate_repo_permissions(apiuser, repoid, repo, _perms)
223 223
224 224 status = Optional.extract(status)
225 225 pull_requests = PullRequestModel().get_all(repo, statuses=[status])
226 226 data = [pr.get_api_data() for pr in pull_requests]
227 227 return data
228 228
229 229
230 230 @jsonrpc_method()
231 231 def merge_pull_request(
232 232 request, apiuser, repoid, pullrequestid,
233 233 userid=Optional(OAttr('apiuser'))):
234 234 """
235 235 Merge the pull request specified by `pullrequestid` into its target
236 236 repository.
237 237
238 238 :param apiuser: This is filled automatically from the |authtoken|.
239 239 :type apiuser: AuthUser
240 240 :param repoid: The Repository name or repository ID of the
241 241 target repository to which the |pr| is to be merged.
242 242 :type repoid: str or int
243 243 :param pullrequestid: ID of the pull request which shall be merged.
244 244 :type pullrequestid: int
245 245 :param userid: Merge the pull request as this user.
246 246 :type userid: Optional(str or int)
247 247
248 248 Example output:
249 249
250 250 .. code-block:: bash
251 251
252 252 "id": <id_given_in_input>,
253 253 "result": {
254 254 "executed": "<bool>",
255 255 "failure_reason": "<int>",
256 256 "merge_commit_id": "<merge_commit_id>",
257 257 "possible": "<bool>",
258 258 "merge_ref": {
259 259 "commit_id": "<commit_id>",
260 260 "type": "<type>",
261 261 "name": "<name>"
262 262 }
263 263 },
264 264 "error": null
265 265 """
266 266 repo = get_repo_or_error(repoid)
267 267 if not isinstance(userid, Optional):
268 268 if (has_superadmin_permission(apiuser) or
269 269 HasRepoPermissionAnyApi('repository.admin')(
270 270 user=apiuser, repo_name=repo.repo_name)):
271 271 apiuser = get_user_or_error(userid)
272 272 else:
273 273 raise JSONRPCError('userid is not the same as your user')
274 274
275 275 pull_request = get_pull_request_or_error(pullrequestid)
276 276
277 277 check = MergeCheck.validate(pull_request, user=apiuser)
278 278 merge_possible = not check.failed
279 279
280 280 if not merge_possible:
281 281 error_messages = []
282 282 for err_type, error_msg in check.errors:
283 283 error_msg = request.translate(error_msg)
284 284 error_messages.append(error_msg)
285 285
286 286 reasons = ','.join(error_messages)
287 287 raise JSONRPCError(
288 288 'merge not possible for following reasons: {}'.format(reasons))
289 289
290 290 target_repo = pull_request.target_repo
291 291 extras = vcs_operation_context(
292 292 request.environ, repo_name=target_repo.repo_name,
293 293 username=apiuser.username, action='push',
294 294 scm=target_repo.repo_type)
295 295 merge_response = PullRequestModel().merge(
296 296 pull_request, apiuser, extras=extras)
297 297 if merge_response.executed:
298 298 PullRequestModel().close_pull_request(
299 299 pull_request.pull_request_id, apiuser)
300 300
301 301 Session().commit()
302 302
303 303 # In previous versions the merge response directly contained the merge
304 304 # commit id. It is now contained in the merge reference object. To be
305 305 # backwards compatible we have to extract it again.
306 306 merge_response = merge_response._asdict()
307 307 merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id
308 308
309 309 return merge_response
310 310
311 311
312 312 @jsonrpc_method()
313 313 def comment_pull_request(
314 314 request, apiuser, repoid, pullrequestid, message=Optional(None),
315 315 commit_id=Optional(None), status=Optional(None),
316 316 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
317 317 resolves_comment_id=Optional(None),
318 318 userid=Optional(OAttr('apiuser'))):
319 319 """
320 320 Comment on the pull request specified with the `pullrequestid`,
321 321 in the |repo| specified by the `repoid`, and optionally change the
322 322 review status.
323 323
324 324 :param apiuser: This is filled automatically from the |authtoken|.
325 325 :type apiuser: AuthUser
326 326 :param repoid: The repository name or repository ID.
327 327 :type repoid: str or int
328 328 :param pullrequestid: The pull request ID.
329 329 :type pullrequestid: int
330 330 :param commit_id: Specify the commit_id for which to set a comment. If
331 331 given commit_id is different than latest in the PR status
332 332 change won't be performed.
333 333 :type commit_id: str
334 334 :param message: The text content of the comment.
335 335 :type message: str
336 336 :param status: (**Optional**) Set the approval status of the pull
337 337 request. One of: 'not_reviewed', 'approved', 'rejected',
338 338 'under_review'
339 339 :type status: str
340 340 :param comment_type: Comment type, one of: 'note', 'todo'
341 341 :type comment_type: Optional(str), default: 'note'
342 342 :param userid: Comment on the pull request as this user
343 343 :type userid: Optional(str or int)
344 344
345 345 Example output:
346 346
347 347 .. code-block:: bash
348 348
349 349 id : <id_given_in_input>
350 350 result : {
351 351 "pull_request_id": "<Integer>",
352 352 "comment_id": "<Integer>",
353 353 "status": {"given": <given_status>,
354 354 "was_changed": <bool status_was_actually_changed> },
355 355 },
356 356 error : null
357 357 """
358 358 repo = get_repo_or_error(repoid)
359 359 if not isinstance(userid, Optional):
360 360 if (has_superadmin_permission(apiuser) or
361 361 HasRepoPermissionAnyApi('repository.admin')(
362 362 user=apiuser, repo_name=repo.repo_name)):
363 363 apiuser = get_user_or_error(userid)
364 364 else:
365 365 raise JSONRPCError('userid is not the same as your user')
366 366
367 367 pull_request = get_pull_request_or_error(pullrequestid)
368 368 if not PullRequestModel().check_user_read(
369 369 pull_request, apiuser, api=True):
370 370 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
371 371 message = Optional.extract(message)
372 372 status = Optional.extract(status)
373 373 commit_id = Optional.extract(commit_id)
374 374 comment_type = Optional.extract(comment_type)
375 375 resolves_comment_id = Optional.extract(resolves_comment_id)
376 376
377 377 if not message and not status:
378 378 raise JSONRPCError(
379 379 'Both message and status parameters are missing. '
380 380 'At least one is required.')
381 381
382 382 if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
383 383 status is not None):
384 384 raise JSONRPCError('Unknown comment status: `%s`' % status)
385 385
386 386 if commit_id and commit_id not in pull_request.revisions:
387 387 raise JSONRPCError(
388 388 'Invalid commit_id `%s` for this pull request.' % commit_id)
389 389
390 390 allowed_to_change_status = PullRequestModel().check_user_change_status(
391 391 pull_request, apiuser)
392 392
393 393 # if commit_id is passed re-validated if user is allowed to change status
394 394 # based on latest commit_id from the PR
395 395 if commit_id:
396 396 commit_idx = pull_request.revisions.index(commit_id)
397 397 if commit_idx != 0:
398 398 allowed_to_change_status = False
399 399
400 400 if resolves_comment_id:
401 401 comment = ChangesetComment.get(resolves_comment_id)
402 402 if not comment:
403 403 raise JSONRPCError(
404 404 'Invalid resolves_comment_id `%s` for this pull request.'
405 405 % resolves_comment_id)
406 406 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
407 407 raise JSONRPCError(
408 408 'Comment `%s` is wrong type for setting status to resolved.'
409 409 % resolves_comment_id)
410 410
411 411 text = message
412 412 status_label = ChangesetStatus.get_status_lbl(status)
413 413 if status and allowed_to_change_status:
414 414 st_message = ('Status change %(transition_icon)s %(status)s'
415 415 % {'transition_icon': '>', 'status': status_label})
416 416 text = message or st_message
417 417
418 418 rc_config = SettingsModel().get_all_settings()
419 419 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
420 420
421 421 status_change = status and allowed_to_change_status
422 422 comment = CommentsModel().create(
423 423 text=text,
424 424 repo=pull_request.target_repo.repo_id,
425 425 user=apiuser.user_id,
426 426 pull_request=pull_request.pull_request_id,
427 427 f_path=None,
428 428 line_no=None,
429 429 status_change=(status_label if status_change else None),
430 430 status_change_type=(status if status_change else None),
431 431 closing_pr=False,
432 432 renderer=renderer,
433 433 comment_type=comment_type,
434 434 resolves_comment_id=resolves_comment_id
435 435 )
436 436
437 437 if allowed_to_change_status and status:
438 438 ChangesetStatusModel().set_status(
439 439 pull_request.target_repo.repo_id,
440 440 status,
441 441 apiuser.user_id,
442 442 comment,
443 443 pull_request=pull_request.pull_request_id
444 444 )
445 445 Session().flush()
446 446
447 447 Session().commit()
448 448 data = {
449 449 'pull_request_id': pull_request.pull_request_id,
450 450 'comment_id': comment.comment_id if comment else None,
451 451 'status': {'given': status, 'was_changed': status_change},
452 452 }
453 453 return data
454 454
455 455
456 456 @jsonrpc_method()
457 457 def create_pull_request(
458 458 request, apiuser, source_repo, target_repo, source_ref, target_ref,
459 459 title, description=Optional(''), reviewers=Optional(None)):
460 460 """
461 461 Creates a new pull request.
462 462
463 463 Accepts refs in the following formats:
464 464
465 465 * branch:<branch_name>:<sha>
466 466 * branch:<branch_name>
467 467 * bookmark:<bookmark_name>:<sha> (Mercurial only)
468 468 * bookmark:<bookmark_name> (Mercurial only)
469 469
470 470 :param apiuser: This is filled automatically from the |authtoken|.
471 471 :type apiuser: AuthUser
472 472 :param source_repo: Set the source repository name.
473 473 :type source_repo: str
474 474 :param target_repo: Set the target repository name.
475 475 :type target_repo: str
476 476 :param source_ref: Set the source ref name.
477 477 :type source_ref: str
478 478 :param target_ref: Set the target ref name.
479 479 :type target_ref: str
480 480 :param title: Set the pull request title.
481 481 :type title: str
482 482 :param description: Set the pull request description.
483 483 :type description: Optional(str)
484 484 :param reviewers: Set the new pull request reviewers list.
485 485 Reviewer defined by review rules will be added automatically to the
486 486 defined list.
487 487 :type reviewers: Optional(list)
488 488 Accepts username strings or objects of the format:
489 489
490 490 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
491 491 """
492 492
493 493 source_db_repo = get_repo_or_error(source_repo)
494 494 target_db_repo = get_repo_or_error(target_repo)
495 495 if not has_superadmin_permission(apiuser):
496 496 _perms = ('repository.admin', 'repository.write', 'repository.read',)
497 497 validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)
498 498
499 499 full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
500 500 full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)
501 501 source_commit = get_commit_or_error(full_source_ref, source_db_repo)
502 502 target_commit = get_commit_or_error(full_target_ref, target_db_repo)
503 503 source_scm = source_db_repo.scm_instance()
504 504 target_scm = target_db_repo.scm_instance()
505 505
506 506 commit_ranges = target_scm.compare(
507 507 target_commit.raw_id, source_commit.raw_id, source_scm,
508 508 merge=True, pre_load=[])
509 509
510 510 ancestor = target_scm.get_common_ancestor(
511 511 target_commit.raw_id, source_commit.raw_id, source_scm)
512 512
513 513 if not commit_ranges:
514 514 raise JSONRPCError('no commits found')
515 515
516 516 if not ancestor:
517 517 raise JSONRPCError('no common ancestor found')
518 518
519 519 reviewer_objects = Optional.extract(reviewers) or []
520 520
521 521 if reviewer_objects:
522 522 schema = ReviewerListSchema()
523 523 try:
524 524 reviewer_objects = schema.deserialize(reviewer_objects)
525 525 except Invalid as err:
526 526 raise JSONRPCValidationError(colander_exc=err)
527 527
528 528 # validate users
529 529 for reviewer_object in reviewer_objects:
530 530 user = get_user_or_error(reviewer_object['username'])
531 531 reviewer_object['user_id'] = user.user_id
532 532
533 533 get_default_reviewers_data, get_validated_reviewers = \
534 534 PullRequestModel().get_reviewer_functions()
535 535
536 536 reviewer_rules = get_default_reviewers_data(
537 537 apiuser.get_instance(), source_db_repo,
538 538 source_commit, target_db_repo, target_commit)
539 539
540 540 # specified rules are later re-validated, thus we can assume users will
541 541 # eventually provide those that meet the reviewer criteria.
542 542 if not reviewer_objects:
543 543 reviewer_objects = reviewer_rules['reviewers']
544 544
545 545 try:
546 546 reviewers = get_validated_reviewers(
547 547 reviewer_objects, reviewer_rules)
548 548 except ValueError as e:
549 549 raise JSONRPCError('Reviewers Validation: {}'.format(e))
550 550
551 551 pull_request_model = PullRequestModel()
552 552 pull_request = pull_request_model.create(
553 553 created_by=apiuser.user_id,
554 554 source_repo=source_repo,
555 555 source_ref=full_source_ref,
556 556 target_repo=target_repo,
557 557 target_ref=full_target_ref,
558 558 revisions=reversed(
559 559 [commit.raw_id for commit in reversed(commit_ranges)]),
560 560 reviewers=reviewers,
561 561 title=title,
562 562 description=Optional.extract(description)
563 563 )
564 564
565 565 Session().commit()
566 566 data = {
567 567 'msg': 'Created new pull request `{}`'.format(title),
568 568 'pull_request_id': pull_request.pull_request_id,
569 569 }
570 570 return data
571 571
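
A hedged usage sketch for create_pull_request, built with the same test helpers used earlier in this changeset; the auth token, repository names, refs and reviewer entry are placeholders that follow the formats given in the docstring above:

    from rhodecode.api.tests.utils import build_data, api_call

    id_, params = build_data(
        apikey, 'create_pull_request',       # apikey: placeholder auth token
        source_repo='group/source-repo',     # placeholder repository names
        target_repo='group/target-repo',
        source_ref='branch:feature',         # 'branch:<branch_name>' form
        target_ref='branch:default',
        title='Example pull request',
        description='created via the JSON-RPC API',
        reviewers=[{'username': 'nick', 'reasons': ['original author'],
                    'mandatory': False}])
    response = api_call(app, params)         # app: the test WSGI app fixture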
572 572
573 573 @jsonrpc_method()
574 574 def update_pull_request(
575 575 request, apiuser, repoid, pullrequestid, title=Optional(''),
576 576 description=Optional(''), reviewers=Optional(None),
577 577 update_commits=Optional(None)):
578 578 """
579 579 Updates a pull request.
580 580
581 581 :param apiuser: This is filled automatically from the |authtoken|.
582 582 :type apiuser: AuthUser
583 583 :param repoid: The repository name or repository ID.
584 584 :type repoid: str or int
585 585 :param pullrequestid: The pull request ID.
586 586 :type pullrequestid: int
587 587 :param title: Set the pull request title.
588 588 :type title: str
589 589 :param description: Update pull request description.
590 590 :type description: Optional(str)
591 591 :param reviewers: Update pull request reviewers list with new value.
592 592 :type reviewers: Optional(list)
593 593 Accepts username strings or objects of the format:
594 594
595 595 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
596 596
597 597 :param update_commits: Trigger update of commits for this pull request
 598 598     :type update_commits: Optional(bool)
599 599
600 600 Example output:
601 601
602 602 .. code-block:: bash
603 603
604 604 id : <id_given_in_input>
605 605 result : {
606 606 "msg": "Updated pull request `63`",
607 607 "pull_request": <pull_request_object>,
608 608 "updated_reviewers": {
609 609 "added": [
610 610 "username"
611 611 ],
612 612 "removed": []
613 613 },
614 614 "updated_commits": {
615 615 "added": [
616 616 "<sha1_hash>"
617 617 ],
618 618 "common": [
619 619 "<sha1_hash>",
620 620 "<sha1_hash>",
621 621 ],
622 622 "removed": []
623 623 }
624 624 }
625 625 error : null
626 626 """
627 627
628 628 repo = get_repo_or_error(repoid)
629 629 pull_request = get_pull_request_or_error(pullrequestid)
630 630 if not PullRequestModel().check_user_update(
631 631 pull_request, apiuser, api=True):
632 632 raise JSONRPCError(
633 633 'pull request `%s` update failed, no permission to update.' % (
634 634 pullrequestid,))
635 635 if pull_request.is_closed():
636 636 raise JSONRPCError(
637 637 'pull request `%s` update failed, pull request is closed' % (
638 638 pullrequestid,))
639 639
640 640 reviewer_objects = Optional.extract(reviewers) or []
641 641
642 642 if reviewer_objects:
643 643 schema = ReviewerListSchema()
644 644 try:
645 645 reviewer_objects = schema.deserialize(reviewer_objects)
646 646 except Invalid as err:
647 647 raise JSONRPCValidationError(colander_exc=err)
648 648
649 649 # validate users
650 650 for reviewer_object in reviewer_objects:
651 651 user = get_user_or_error(reviewer_object['username'])
652 652 reviewer_object['user_id'] = user.user_id
653 653
654 654 get_default_reviewers_data, get_validated_reviewers = \
655 655 PullRequestModel().get_reviewer_functions()
656 656
657 657 # re-use stored rules
658 658 reviewer_rules = pull_request.reviewer_data
659 659 try:
660 660 reviewers = get_validated_reviewers(
661 661 reviewer_objects, reviewer_rules)
662 662 except ValueError as e:
663 663 raise JSONRPCError('Reviewers Validation: {}'.format(e))
664 664 else:
665 665 reviewers = []
666 666
667 667 title = Optional.extract(title)
668 668 description = Optional.extract(description)
669 669 if title or description:
670 670 PullRequestModel().edit(
671 671 pull_request, title or pull_request.title,
672 description or pull_request.description)
672 description or pull_request.description, apiuser)
673 673 Session().commit()
674 674
675 675 commit_changes = {"added": [], "common": [], "removed": []}
676 676 if str2bool(Optional.extract(update_commits)):
677 677 if PullRequestModel().has_valid_update_type(pull_request):
678 678 update_response = PullRequestModel().update_commits(
679 679 pull_request)
680 680 commit_changes = update_response.changes or commit_changes
681 681 Session().commit()
682 682
683 683 reviewers_changes = {"added": [], "removed": []}
684 684 if reviewers:
685 685 added_reviewers, removed_reviewers = \
686 PullRequestModel().update_reviewers(pull_request, reviewers)
686 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)
687 687
688 688 reviewers_changes['added'] = sorted(
689 689 [get_user_or_error(n).username for n in added_reviewers])
690 690 reviewers_changes['removed'] = sorted(
691 691 [get_user_or_error(n).username for n in removed_reviewers])
692 692 Session().commit()
693 693
694 694 data = {
695 695 'msg': 'Updated pull request `{}`'.format(
696 696 pull_request.pull_request_id),
697 697 'pull_request': pull_request.get_api_data(),
698 698 'updated_commits': commit_changes,
699 699 'updated_reviewers': reviewers_changes
700 700 }
701 701
702 702 return data
703 703
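# Illustrative sketch, not part of the changeset above: the `args` mapping a
# JSON-RPC client might send to `update_pull_request`, mirroring the docstring.
# The repository name, pull request id and reviewer entry are placeholders.
EXAMPLE_UPDATE_PULL_REQUEST_ARGS = {
    'repoid': 'my-repo',
    'pullrequestid': 63,
    'title': 'Updated title',
    'description': 'Updated description',
    'reviewers': [
        {'username': 'nick', 'reasons': ['original author'], 'mandatory': False},
    ],
    'update_commits': True,
}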
704 704
705 705 @jsonrpc_method()
706 706 def close_pull_request(
707 707 request, apiuser, repoid, pullrequestid,
708 708 userid=Optional(OAttr('apiuser')), message=Optional('')):
709 709 """
710 710 Close the pull request specified by `pullrequestid`.
711 711
712 712 :param apiuser: This is filled automatically from the |authtoken|.
713 713 :type apiuser: AuthUser
714 714 :param repoid: Repository name or repository ID to which the pull
715 715 request belongs.
716 716 :type repoid: str or int
717 717 :param pullrequestid: ID of the pull request to be closed.
718 718 :type pullrequestid: int
719 719 :param userid: Close the pull request as this user.
720 720 :type userid: Optional(str or int)
721 721 :param message: Optional message to close the pull request with. If not
722 722 specified it will be generated automatically.
723 723 :type message: Optional(str)
724 724
725 725 Example output:
726 726
727 727 .. code-block:: bash
728 728
729 729 "id": <id_given_in_input>,
730 730 "result": {
731 731 "pull_request_id": "<int>",
732 732 "close_status": "<str:status_lbl>,
733 733 "closed": "<bool>"
734 734 },
735 735 "error": null
736 736
737 737 """
738 738 _ = request.translate
739 739
740 740 repo = get_repo_or_error(repoid)
741 741 if not isinstance(userid, Optional):
742 742 if (has_superadmin_permission(apiuser) or
743 743 HasRepoPermissionAnyApi('repository.admin')(
744 744 user=apiuser, repo_name=repo.repo_name)):
745 745 apiuser = get_user_or_error(userid)
746 746 else:
747 747 raise JSONRPCError('userid is not the same as your user')
748 748
749 749 pull_request = get_pull_request_or_error(pullrequestid)
750 750
751 751 if pull_request.is_closed():
752 752 raise JSONRPCError(
753 753 'pull request `%s` is already closed' % (pullrequestid,))
754 754
755 755 # only owner or admin or person with write permissions
756 756 allowed_to_close = PullRequestModel().check_user_update(
757 757 pull_request, apiuser, api=True)
758 758
759 759 if not allowed_to_close:
760 760 raise JSONRPCError(
761 761 'pull request `%s` close failed, no permission to close.' % (
762 762 pullrequestid,))
763 763
764 764 # message used to close the PR; if not given it's generated automatically
765 765 message = Optional.extract(message)
766 766
767 767 # finally close the PR, adding a closing comment with the message
768 768 comment, status = PullRequestModel().close_pull_request_with_comment(
769 769 pull_request, apiuser, repo, message=message)
770 770 status_lbl = ChangesetStatus.get_status_lbl(status)
771 771
772 772 Session().commit()
773 773
774 774 data = {
775 775 'pull_request_id': pull_request.pull_request_id,
776 776 'close_status': status_lbl,
777 777 'closed': True,
778 778 }
779 779 return data
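For context, here is a minimal client-side sketch of calling `close_pull_request` through the JSON-RPC API documented above. The instance URL, token, repository name and pull request id are placeholders, and the envelope fields (`id`, `auth_token`, `method`, `args`) follow the usual RhodeCode API request format rather than anything introduced in this changeset.

import json
import requests  # any HTTP client works; requests is used here for brevity

API_URL = 'https://code.example.com/_admin/api'  # placeholder instance URL
AUTH_TOKEN = '<auth_token>'                      # placeholder API token

payload = {
    'id': 1,
    'auth_token': AUTH_TOKEN,
    'method': 'close_pull_request',
    'args': {
        'repoid': 'my-repo',   # placeholder repository name
        'pullrequestid': 63,   # placeholder pull request id
        'message': 'Closing in favour of the rewritten branch',
    },
}

response = requests.post(
    API_URL, data=json.dumps(payload),
    headers={'Content-Type': 'application/json'})
result = response.json()['result']
print(result['close_status'], result['closed'])  # status label and closed flag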
@@ -1,643 +1,643 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Users CRUD controller for Pylons
23 23 """
24 24
25 25 import logging
26 26 import formencode
27 27
28 28 from formencode import htmlfill
29 29 from pylons import request, tmpl_context as c, url, config
30 30 from pylons.controllers.util import redirect
31 31 from pylons.i18n.translation import _
32 32
33 33 from rhodecode.authentication.plugins import auth_rhodecode
34 34
35 35 from rhodecode.lib import helpers as h
36 36 from rhodecode.lib import auth
37 37 from rhodecode.lib import audit_logger
38 38 from rhodecode.lib.auth import (
39 39 LoginRequired, HasPermissionAllDecorator, AuthUser)
40 40 from rhodecode.lib.base import BaseController, render
41 41 from rhodecode.lib.exceptions import (
42 42 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
43 43 UserOwnsUserGroupsException, UserCreationError)
44 44 from rhodecode.lib.utils2 import safe_int, AttributeDict
45 45
46 46 from rhodecode.model.db import (
47 47 PullRequestReviewers, User, UserEmailMap, UserIpMap, RepoGroup)
48 48 from rhodecode.model.forms import (
49 49 UserForm, UserPermissionsForm, UserIndividualPermissionsForm)
50 50 from rhodecode.model.repo_group import RepoGroupModel
51 51 from rhodecode.model.user import UserModel
52 52 from rhodecode.model.meta import Session
53 53 from rhodecode.model.permission import PermissionModel
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 class UsersController(BaseController):
59 59 """REST Controller styled on the Atom Publishing Protocol"""
60 60
61 61 @LoginRequired()
62 62 def __before__(self):
63 63 super(UsersController, self).__before__()
64 64 c.available_permissions = config['available_permissions']
65 65 c.allowed_languages = [
66 66 ('en', 'English (en)'),
67 67 ('de', 'German (de)'),
68 68 ('fr', 'French (fr)'),
69 69 ('it', 'Italian (it)'),
70 70 ('ja', 'Japanese (ja)'),
71 71 ('pl', 'Polish (pl)'),
72 72 ('pt', 'Portuguese (pt)'),
73 73 ('ru', 'Russian (ru)'),
74 74 ('zh', 'Chinese (zh)'),
75 75 ]
76 76 PermissionModel().set_global_permission_choices(c, gettext_translator=_)
77 77
78 78 def _get_personal_repo_group_template_vars(self):
79 79 DummyUser = AttributeDict({
80 80 'username': '${username}',
81 81 'user_id': '${user_id}',
82 82 })
83 83 c.default_create_repo_group = RepoGroupModel() \
84 84 .get_default_create_personal_repo_group()
85 85 c.personal_repo_group_name = RepoGroupModel() \
86 86 .get_personal_group_name(DummyUser)
87 87
88 88 @HasPermissionAllDecorator('hg.admin')
89 89 @auth.CSRFRequired()
90 90 def create(self):
91 91 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.name
92 92 user_model = UserModel()
93 93 user_form = UserForm()()
94 94 try:
95 95 form_result = user_form.to_python(dict(request.POST))
96 96 user = user_model.create(form_result)
97 97 Session().flush()
98 98 creation_data = user.get_api_data()
99 99 username = form_result['username']
100 100
101 101 audit_logger.store_web(
102 102 'user.create', action_data={'data': creation_data},
103 103 user=c.rhodecode_user)
104 104
105 105 user_link = h.link_to(h.escape(username),
106 106 url('edit_user',
107 107 user_id=user.user_id))
108 108 h.flash(h.literal(_('Created user %(user_link)s')
109 109 % {'user_link': user_link}), category='success')
110 110 Session().commit()
111 111 except formencode.Invalid as errors:
112 112 self._get_personal_repo_group_template_vars()
113 113 return htmlfill.render(
114 114 render('admin/users/user_add.mako'),
115 115 defaults=errors.value,
116 116 errors=errors.error_dict or {},
117 117 prefix_error=False,
118 118 encoding="UTF-8",
119 119 force_defaults=False)
120 120 except UserCreationError as e:
121 121 h.flash(e, 'error')
122 122 except Exception:
123 123 log.exception("Exception creation of user")
124 124 h.flash(_('Error occurred during creation of user %s')
125 125 % request.POST.get('username'), category='error')
126 126 return redirect(h.route_path('users'))
127 127
128 128 @HasPermissionAllDecorator('hg.admin')
129 129 def new(self):
130 130 c.default_extern_type = auth_rhodecode.RhodeCodeAuthPlugin.name
131 131 self._get_personal_repo_group_template_vars()
132 132 return render('admin/users/user_add.mako')
133 133
134 134 @HasPermissionAllDecorator('hg.admin')
135 135 @auth.CSRFRequired()
136 136 def update(self, user_id):
137 137
138 138 user_id = safe_int(user_id)
139 139 c.user = User.get_or_404(user_id)
140 140 c.active = 'profile'
141 141 c.extern_type = c.user.extern_type
142 142 c.extern_name = c.user.extern_name
143 143 c.perm_user = AuthUser(user_id=user_id, ip_addr=self.ip_addr)
144 144 available_languages = [x[0] for x in c.allowed_languages]
145 145 _form = UserForm(edit=True, available_languages=available_languages,
146 146 old_data={'user_id': user_id,
147 147 'email': c.user.email})()
148 148 form_result = {}
149 149 old_values = c.user.get_api_data()
150 150 try:
151 151 form_result = _form.to_python(dict(request.POST))
152 152 skip_attrs = ['extern_type', 'extern_name']
153 153 # TODO: plugin should define if username can be updated
154 154 if c.extern_type != "rhodecode":
155 155 # forbid updating username for external accounts
156 156 skip_attrs.append('username')
157 157
158 158 UserModel().update_user(
159 159 user_id, skip_attrs=skip_attrs, **form_result)
160 160
161 161 audit_logger.store_web(
162 162 'user.edit', action_data={'old_data': old_values},
163 163 user=c.rhodecode_user)
164 164
165 165 Session().commit()
166 166 h.flash(_('User updated successfully'), category='success')
167 167 except formencode.Invalid as errors:
168 168 defaults = errors.value
169 169 e = errors.error_dict or {}
170 170
171 171 return htmlfill.render(
172 172 render('admin/users/user_edit.mako'),
173 173 defaults=defaults,
174 174 errors=e,
175 175 prefix_error=False,
176 176 encoding="UTF-8",
177 177 force_defaults=False)
178 178 except UserCreationError as e:
179 179 h.flash(e, 'error')
180 180 except Exception:
181 181 log.exception("Exception updating user")
182 182 h.flash(_('Error occurred during update of user %s')
183 183 % form_result.get('username'), category='error')
184 184 return redirect(url('edit_user', user_id=user_id))
185 185
186 186 @HasPermissionAllDecorator('hg.admin')
187 187 @auth.CSRFRequired()
188 188 def delete(self, user_id):
189 189 user_id = safe_int(user_id)
190 190 c.user = User.get_or_404(user_id)
191 191
192 192 _repos = c.user.repositories
193 193 _repo_groups = c.user.repository_groups
194 194 _user_groups = c.user.user_groups
195 195
196 196 handle_repos = None
197 197 handle_repo_groups = None
198 198 handle_user_groups = None
199 199 # no-op defaults for the handle flash messages below
200 200 set_handle_flash_repos = lambda: None
201 201 set_handle_flash_repo_groups = lambda: None
202 202 set_handle_flash_user_groups = lambda: None
203 203
204 204 if _repos and request.POST.get('user_repos'):
205 205 do = request.POST['user_repos']
206 206 if do == 'detach':
207 207 handle_repos = 'detach'
208 208 set_handle_flash_repos = lambda: h.flash(
209 209 _('Detached %s repositories') % len(_repos),
210 210 category='success')
211 211 elif do == 'delete':
212 212 handle_repos = 'delete'
213 213 set_handle_flash_repos = lambda: h.flash(
214 214 _('Deleted %s repositories') % len(_repos),
215 215 category='success')
216 216
217 217 if _repo_groups and request.POST.get('user_repo_groups'):
218 218 do = request.POST['user_repo_groups']
219 219 if do == 'detach':
220 220 handle_repo_groups = 'detach'
221 221 set_handle_flash_repo_groups = lambda: h.flash(
222 222 _('Detached %s repository groups') % len(_repo_groups),
223 223 category='success')
224 224 elif do == 'delete':
225 225 handle_repo_groups = 'delete'
226 226 set_handle_flash_repo_groups = lambda: h.flash(
227 227 _('Deleted %s repository groups') % len(_repo_groups),
228 228 category='success')
229 229
230 230 if _user_groups and request.POST.get('user_user_groups'):
231 231 do = request.POST['user_user_groups']
232 232 if do == 'detach':
233 233 handle_user_groups = 'detach'
234 234 set_handle_flash_user_groups = lambda: h.flash(
235 235 _('Detached %s user groups') % len(_user_groups),
236 236 category='success')
237 237 elif do == 'delete':
238 238 handle_user_groups = 'delete'
239 239 set_handle_flash_user_groups = lambda: h.flash(
240 240 _('Deleted %s user groups') % len(_user_groups),
241 241 category='success')
242 242
243 243 old_values = c.user.get_api_data()
244 244 try:
245 245 UserModel().delete(c.user, handle_repos=handle_repos,
246 246 handle_repo_groups=handle_repo_groups,
247 247 handle_user_groups=handle_user_groups)
248 248
249 249 audit_logger.store_web(
250 250 'user.delete', action_data={'old_data': old_values},
251 251 user=c.rhodecode_user)
252 252
253 253 Session().commit()
254 254 set_handle_flash_repos()
255 255 set_handle_flash_repo_groups()
256 256 set_handle_flash_user_groups()
257 257 h.flash(_('Successfully deleted user'), category='success')
258 258 except (UserOwnsReposException, UserOwnsRepoGroupsException,
259 259 UserOwnsUserGroupsException, DefaultUserException) as e:
260 260 h.flash(e, category='warning')
261 261 except Exception:
262 262 log.exception("Exception during deletion of user")
263 263 h.flash(_('An error occurred during deletion of user'),
264 264 category='error')
265 265 return redirect(h.route_path('users'))
266 266
267 267 @HasPermissionAllDecorator('hg.admin')
268 268 @auth.CSRFRequired()
269 269 def reset_password(self, user_id):
270 270 """
271 271 Toggle the force password change flag for this user
272 272 """
273 273 user_id = safe_int(user_id)
274 274 c.user = User.get_or_404(user_id)
275 275 try:
276 276 old_value = c.user.user_data.get('force_password_change')
277 277 c.user.update_userdata(force_password_change=not old_value)
278 278
279 279 if old_value:
280 280 msg = _('Force password change disabled for user')
281 281 audit_logger.store_web(
282 282 'user.edit.password_reset.disabled',
283 283 user=c.rhodecode_user)
284 284 else:
285 285 msg = _('Force password change enabled for user')
286 286 audit_logger.store_web(
287 287 'user.edit.password_reset.enabled',
288 288 user=c.rhodecode_user)
289 289
290 290 Session().commit()
291 291 h.flash(msg, category='success')
292 292 except Exception:
293 293 log.exception("Exception during password reset for user")
294 294 h.flash(_('An error occurred during password reset for user'),
295 295 category='error')
296 296
297 297 return redirect(url('edit_user_advanced', user_id=user_id))
298 298
299 299 @HasPermissionAllDecorator('hg.admin')
300 300 @auth.CSRFRequired()
301 301 def create_personal_repo_group(self, user_id):
302 302 """
303 303 Create personal repository group for this user
304 304 """
305 305 from rhodecode.model.repo_group import RepoGroupModel
306 306
307 307 user_id = safe_int(user_id)
308 308 c.user = User.get_or_404(user_id)
309 309 personal_repo_group = RepoGroup.get_user_personal_repo_group(
310 310 c.user.user_id)
311 311 if personal_repo_group:
312 312 return redirect(url('edit_user_advanced', user_id=user_id))
313 313
314 314 personal_repo_group_name = RepoGroupModel().get_personal_group_name(
315 315 c.user)
316 316 named_personal_group = RepoGroup.get_by_group_name(
317 317 personal_repo_group_name)
318 318 try:
319 319
320 320 if named_personal_group and named_personal_group.user_id == c.user.user_id:
321 321 # migrate the same named group, and mark it as personal
322 322 named_personal_group.personal = True
323 323 Session().add(named_personal_group)
324 324 Session().commit()
325 325 msg = _('Linked repository group `%s` as personal' % (
326 326 personal_repo_group_name,))
327 327 h.flash(msg, category='success')
328 328 elif not named_personal_group:
329 329 RepoGroupModel().create_personal_repo_group(c.user)
330 330
331 331 msg = _('Created repository group `%s`' % (
332 332 personal_repo_group_name,))
333 333 h.flash(msg, category='success')
334 334 else:
335 335 msg = _('Repository group `%s` is already taken' % (
336 336 personal_repo_group_name,))
337 337 h.flash(msg, category='warning')
338 338 except Exception:
339 339 log.exception("Exception during repository group creation")
340 340 msg = _(
341 341 'An error occurred during repository group creation for user')
342 342 h.flash(msg, category='error')
343 343 Session().rollback()
344 344
345 345 return redirect(url('edit_user_advanced', user_id=user_id))
346 346
347 347 @HasPermissionAllDecorator('hg.admin')
348 348 def show(self, user_id):
349 349 """GET /users/user_id: Show a specific item"""
350 350 # url('user', user_id=ID)
351 351 User.get_or_404(-1)
352 352
353 353 @HasPermissionAllDecorator('hg.admin')
354 354 def edit(self, user_id):
355 355 """GET /users/user_id/edit: Form to edit an existing item"""
356 356 # url('edit_user', user_id=ID)
357 357 user_id = safe_int(user_id)
358 358 c.user = User.get_or_404(user_id)
359 359 if c.user.username == User.DEFAULT_USER:
360 360 h.flash(_("You can't edit this user"), category='warning')
361 361 return redirect(h.route_path('users'))
362 362
363 363 c.active = 'profile'
364 364 c.extern_type = c.user.extern_type
365 365 c.extern_name = c.user.extern_name
366 366 c.perm_user = AuthUser(user_id=user_id, ip_addr=self.ip_addr)
367 367
368 368 defaults = c.user.get_dict()
369 369 defaults.update({'language': c.user.user_data.get('language')})
370 370 return htmlfill.render(
371 371 render('admin/users/user_edit.mako'),
372 372 defaults=defaults,
373 373 encoding="UTF-8",
374 374 force_defaults=False)
375 375
376 376 @HasPermissionAllDecorator('hg.admin')
377 377 def edit_advanced(self, user_id):
378 378 user_id = safe_int(user_id)
379 379 user = c.user = User.get_or_404(user_id)
380 380 if user.username == User.DEFAULT_USER:
381 381 h.flash(_("You can't edit this user"), category='warning')
382 382 return redirect(h.route_path('users'))
383 383
384 384 c.active = 'advanced'
385 385 c.personal_repo_group = RepoGroup.get_user_personal_repo_group(user_id)
386 386 c.personal_repo_group_name = RepoGroupModel()\
387 387 .get_personal_group_name(user)
388 388 c.first_admin = User.get_first_super_admin()
389 389 defaults = user.get_dict()
390 390
391 391 # Interim workaround if the user participated in any pull requests as a
392 392 # reviewer.
393 393 has_review = bool(PullRequestReviewers.query().filter(
394 394 PullRequestReviewers.user_id == user_id).first())
395 395 c.can_delete_user = not has_review
396 396 c.can_delete_user_message = _(
397 397 'The user participates as reviewer in pull requests and '
398 398 'cannot be deleted. You can set the user to '
399 399 '"inactive" instead of deleting it.') if has_review else ''
400 400
401 401 return htmlfill.render(
402 402 render('admin/users/user_edit.mako'),
403 403 defaults=defaults,
404 404 encoding="UTF-8",
405 405 force_defaults=False)
406 406
407 407 @HasPermissionAllDecorator('hg.admin')
408 408 def edit_global_perms(self, user_id):
409 409 user_id = safe_int(user_id)
410 410 c.user = User.get_or_404(user_id)
411 411 if c.user.username == User.DEFAULT_USER:
412 412 h.flash(_("You can't edit this user"), category='warning')
413 413 return redirect(h.route_path('users'))
414 414
415 415 c.active = 'global_perms'
416 416
417 417 c.default_user = User.get_default_user()
418 418 defaults = c.user.get_dict()
419 419 defaults.update(c.default_user.get_default_perms(suffix='_inherited'))
420 420 defaults.update(c.default_user.get_default_perms())
421 421 defaults.update(c.user.get_default_perms())
422 422
423 423 return htmlfill.render(
424 424 render('admin/users/user_edit.mako'),
425 425 defaults=defaults,
426 426 encoding="UTF-8",
427 427 force_defaults=False)
428 428
429 429 @HasPermissionAllDecorator('hg.admin')
430 430 @auth.CSRFRequired()
431 431 def update_global_perms(self, user_id):
432 432 user_id = safe_int(user_id)
433 433 user = User.get_or_404(user_id)
434 434 c.active = 'global_perms'
435 435 try:
436 436 # first stage that verifies the checkbox
437 437 _form = UserIndividualPermissionsForm()
438 438 form_result = _form.to_python(dict(request.POST))
439 439 inherit_perms = form_result['inherit_default_permissions']
440 440 user.inherit_default_permissions = inherit_perms
441 441 Session().add(user)
442 442
443 443 if not inherit_perms:
444 444 # only update the individual permissions if the inherit flag is unchecked
445 445 _form = UserPermissionsForm(
446 446 [x[0] for x in c.repo_create_choices],
447 447 [x[0] for x in c.repo_create_on_write_choices],
448 448 [x[0] for x in c.repo_group_create_choices],
449 449 [x[0] for x in c.user_group_create_choices],
450 450 [x[0] for x in c.fork_choices],
451 451 [x[0] for x in c.inherit_default_permission_choices])()
452 452
453 453 form_result = _form.to_python(dict(request.POST))
454 454 form_result.update({'perm_user_id': user.user_id})
455 455
456 456 PermissionModel().update_user_permissions(form_result)
457 457
458 458 # TODO(marcink): implement global permissions
459 459 # audit_log.store_web('user.edit.permissions')
460 460
461 461 Session().commit()
462 462 h.flash(_('User global permissions updated successfully'),
463 463 category='success')
464 464
465 465 except formencode.Invalid as errors:
466 466 defaults = errors.value
467 467 c.user = user
468 468 return htmlfill.render(
469 469 render('admin/users/user_edit.mako'),
470 470 defaults=defaults,
471 471 errors=errors.error_dict or {},
472 472 prefix_error=False,
473 473 encoding="UTF-8",
474 474 force_defaults=False)
475 475 except Exception:
476 476 log.exception("Exception during permissions saving")
477 477 h.flash(_('An error occurred during permissions saving'),
478 478 category='error')
479 479 return redirect(url('edit_user_global_perms', user_id=user_id))
480 480
481 481 @HasPermissionAllDecorator('hg.admin')
482 482 def edit_perms_summary(self, user_id):
483 483 user_id = safe_int(user_id)
484 484 c.user = User.get_or_404(user_id)
485 485 if c.user.username == User.DEFAULT_USER:
486 486 h.flash(_("You can't edit this user"), category='warning')
487 487 return redirect(h.route_path('users'))
488 488
489 489 c.active = 'perms_summary'
490 490 c.perm_user = AuthUser(user_id=user_id, ip_addr=self.ip_addr)
491 491
492 492 return render('admin/users/user_edit.mako')
493 493
494 494 @HasPermissionAllDecorator('hg.admin')
495 495 def edit_emails(self, user_id):
496 496 user_id = safe_int(user_id)
497 497 c.user = User.get_or_404(user_id)
498 498 if c.user.username == User.DEFAULT_USER:
499 499 h.flash(_("You can't edit this user"), category='warning')
500 500 return redirect(h.route_path('users'))
501 501
502 502 c.active = 'emails'
503 503 c.user_email_map = UserEmailMap.query() \
504 504 .filter(UserEmailMap.user == c.user).all()
505 505
506 506 defaults = c.user.get_dict()
507 507 return htmlfill.render(
508 508 render('admin/users/user_edit.mako'),
509 509 defaults=defaults,
510 510 encoding="UTF-8",
511 511 force_defaults=False)
512 512
513 513 @HasPermissionAllDecorator('hg.admin')
514 514 @auth.CSRFRequired()
515 515 def add_email(self, user_id):
516 516 user_id = safe_int(user_id)
517 517 c.user = User.get_or_404(user_id)
518 518
519 519 email = request.POST.get('new_email')
520 520 user_model = UserModel()
521 521 user_data = c.user.get_api_data()
522 522 try:
523 523 user_model.add_extra_email(user_id, email)
524 524 audit_logger.store_web(
525 525 'user.edit.email.add',
526 526 action_data={'email': email, 'user': user_data},
527 527 user=c.rhodecode_user)
528 528 Session().commit()
529 529 h.flash(_("Added new email address `%s` for user account") % email,
530 530 category='success')
531 531 except formencode.Invalid as error:
532 532 msg = error.error_dict['email']
533 533 h.flash(msg, category='error')
534 534 except Exception:
535 535 log.exception("Exception during email saving")
536 536 h.flash(_('An error occurred during email saving'),
537 537 category='error')
538 538 return redirect(url('edit_user_emails', user_id=user_id))
539 539
540 540 @HasPermissionAllDecorator('hg.admin')
541 541 @auth.CSRFRequired()
542 542 def delete_email(self, user_id):
543 543 user_id = safe_int(user_id)
544 544 c.user = User.get_or_404(user_id)
545 545 email_id = request.POST.get('del_email_id')
546 546 user_model = UserModel()
547 547
548 548 email = UserEmailMap.query().get(email_id).email
549 549 user_data = c.user.get_api_data()
550 550 user_model.delete_extra_email(user_id, email_id)
551 551 audit_logger.store_web(
552 552 'user.edit.email.delete',
553 553 action_data={'email': email, 'user': user_data},
554 554 user=c.rhodecode_user)
555 555 Session().commit()
556 556 h.flash(_("Removed email address from user account"), category='success')
557 557 return redirect(url('edit_user_emails', user_id=user_id))
558 558
559 559 @HasPermissionAllDecorator('hg.admin')
560 560 def edit_ips(self, user_id):
561 561 user_id = safe_int(user_id)
562 562 c.user = User.get_or_404(user_id)
563 563 if c.user.username == User.DEFAULT_USER:
564 564 h.flash(_("You can't edit this user"), category='warning')
565 565 return redirect(h.route_path('users'))
566 566
567 567 c.active = 'ips'
568 568 c.user_ip_map = UserIpMap.query() \
569 569 .filter(UserIpMap.user == c.user).all()
570 570
571 571 c.inherit_default_ips = c.user.inherit_default_permissions
572 572 c.default_user_ip_map = UserIpMap.query() \
573 573 .filter(UserIpMap.user == User.get_default_user()).all()
574 574
575 575 defaults = c.user.get_dict()
576 576 return htmlfill.render(
577 577 render('admin/users/user_edit.mako'),
578 578 defaults=defaults,
579 579 encoding="UTF-8",
580 580 force_defaults=False)
581 581
582 582 @HasPermissionAllDecorator('hg.admin')
583 583 @auth.CSRFRequired()
584 584 def add_ip(self, user_id):
585 585 user_id = safe_int(user_id)
586 586 c.user = User.get_or_404(user_id)
587 587 user_model = UserModel()
588 588 try:
589 589 ip_list = user_model.parse_ip_range(request.POST.get('new_ip'))
590 590 except Exception as e:
591 591 ip_list = []
592 592 log.exception("Exception during ip saving")
593 593 h.flash(_('An error occurred during ip saving: %s' % (e,)),
594 594 category='error')
595 595
596 596 desc = request.POST.get('description')
597 597 added = []
598 598 user_data = c.user.get_api_data()
599 599 for ip in ip_list:
600 600 try:
601 601 user_model.add_extra_ip(user_id, ip, desc)
602 602 audit_logger.store_web(
603 603 'user.edit.ip.add',
604 604 action_data={'ip': ip, 'user': user_data},
605 605 user=c.rhodecode_user)
606 606 Session().commit()
607 607 added.append(ip)
608 608 except formencode.Invalid as error:
609 609 msg = error.error_dict['ip']
610 610 h.flash(msg, category='error')
611 611 except Exception:
612 612 log.exception("Exception during ip saving")
613 613 h.flash(_('An error occurred during ip saving'),
614 614 category='error')
615 615 if added:
616 616 h.flash(
617 617 _("Added ips %s to user whitelist") % (', '.join(ip_list), ),
618 618 category='success')
619 619 if 'default_user' in request.POST:
620 620 return redirect(url('admin_permissions_ips'))
621 621 return redirect(url('edit_user_ips', user_id=user_id))
622 622
623 623 @HasPermissionAllDecorator('hg.admin')
624 624 @auth.CSRFRequired()
625 625 def delete_ip(self, user_id):
626 626 user_id = safe_int(user_id)
627 627 c.user = User.get_or_404(user_id)
628 628
629 629 ip_id = request.POST.get('del_ip_id')
630 630 user_model = UserModel()
631 user_data = c.user.get_api_data()
631 632 ip = UserIpMap.query().get(ip_id).ip_addr
632 user_data = c.user.get_api_data()
633 633 user_model.delete_extra_ip(user_id, ip_id)
634 634 audit_logger.store_web(
635 635 'user.edit.ip.delete',
636 636 action_data={'ip': ip, 'user': user_data},
637 637 user=c.rhodecode_user)
638 638 Session().commit()
639 639 h.flash(_("Removed ip address from user whitelist"), category='success')
640 640
641 641 if 'default_user' in request.POST:
642 642 return redirect(url('admin_permissions_ips'))
643 643 return redirect(url('edit_user_ips', user_id=user_id))
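The controller above records every admin-side user change through the same small pattern: mutate via the model layer, call `audit_logger.store_web()` with an action name and an `action_data` dict, then commit. Below is a condensed sketch of that pattern using the `user.edit.email.add` action seen above; the standalone helper and its argument names are illustrative only, not code from this changeset.

from rhodecode.lib import audit_logger
from rhodecode.model.meta import Session
from rhodecode.model.user import UserModel

def add_email_with_audit(user_id, email, acting_user):
    """Illustrative helper showing the audit pattern used in the controller."""
    user_model = UserModel()
    user_data = user_model.get(user_id).get_api_data()

    # 1. perform the change through the model layer
    user_model.add_extra_email(user_id, email)

    # 2. record who did what, with enough context to reconstruct the event
    audit_logger.store_web(
        'user.edit.email.add',
        action_data={'email': email, 'user': user_data},
        user=acting_user)

    # 3. persist both the change and the audit entry in one transaction
    Session().commit()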
@@ -1,484 +1,484 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Commit controller for RhodeCode showing changes between commits
23 23 """
24 24
25 25 import logging
26 26
27 27 from collections import defaultdict
28 28 from webob.exc import HTTPForbidden, HTTPBadRequest, HTTPNotFound
29 29
30 30 from pylons import tmpl_context as c, request, response
31 31 from pylons.i18n.translation import _
32 32 from pylons.controllers.util import redirect
33 33
34 34 from rhodecode.lib import auth
35 35 from rhodecode.lib import diffs, codeblocks
36 36 from rhodecode.lib.auth import (
37 37 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous)
38 38 from rhodecode.lib.base import BaseRepoController, render
39 39 from rhodecode.lib.compat import OrderedDict
40 40 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
41 41 import rhodecode.lib.helpers as h
42 42 from rhodecode.lib.utils import jsonify
43 43 from rhodecode.lib.utils2 import safe_unicode
44 44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 RepositoryError, CommitDoesNotExistError, NodeDoesNotExistError)
47 47 from rhodecode.model.db import ChangesetComment, ChangesetStatus
48 48 from rhodecode.model.changeset_status import ChangesetStatusModel
49 49 from rhodecode.model.comment import CommentsModel
50 50 from rhodecode.model.meta import Session
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 def _update_with_GET(params, GET):
57 57 for k in ['diff1', 'diff2', 'diff']:
58 58 params[k] += GET.getall(k)
59 59
60 60
61 61 def get_ignore_ws(fid, GET):
62 62 ig_ws_global = GET.get('ignorews')
63 63 ig_ws = filter(lambda k: k.startswith('WS'), GET.getall(fid))
64 64 if ig_ws:
65 65 try:
66 66 return int(ig_ws[0].split(':')[-1])
67 67 except Exception:
68 68 pass
69 69 return ig_ws_global
70 70
71 71
72 72 def _ignorews_url(GET, fileid=None):
73 73 fileid = str(fileid) if fileid else None
74 74 params = defaultdict(list)
75 75 _update_with_GET(params, GET)
76 76 label = _('Show whitespace')
77 77 tooltiplbl = _('Show whitespace for all diffs')
78 78 ig_ws = get_ignore_ws(fileid, GET)
79 79 ln_ctx = get_line_ctx(fileid, GET)
80 80
81 81 if ig_ws is None:
82 82 params['ignorews'] += [1]
83 83 label = _('Ignore whitespace')
84 84 tooltiplbl = _('Ignore whitespace for all diffs')
85 85 ctx_key = 'context'
86 86 ctx_val = ln_ctx
87 87
88 88 # if we have passed in ln_ctx pass it along to our params
89 89 if ln_ctx:
90 90 params[ctx_key] += [ctx_val]
91 91
92 92 if fileid:
93 93 params['anchor'] = 'a_' + fileid
94 94 return h.link_to(label, h.url.current(**params), title=tooltiplbl, class_='tooltip')
95 95
96 96
97 97 def get_line_ctx(fid, GET):
98 98 ln_ctx_global = GET.get('context')
99 99 if fid:
100 100 ln_ctx = filter(lambda k: k.startswith('C'), GET.getall(fid))
101 101 else:
102 102 _ln_ctx = filter(lambda k: k.startswith('C'), GET)
103 103 ln_ctx = GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global
104 104 if ln_ctx:
105 105 ln_ctx = [ln_ctx]
106 106
107 107 if ln_ctx:
108 108 retval = ln_ctx[0].split(':')[-1]
109 109 else:
110 110 retval = ln_ctx_global
111 111
112 112 try:
113 113 return int(retval)
114 114 except Exception:
115 115 return 3
116 116
117 117
118 118 def _context_url(GET, fileid=None):
119 119 """
120 120 Generates a url for context lines.
121 121
122 122 :param fileid:
123 123 """
124 124
125 125 fileid = str(fileid) if fileid else None
126 126 ig_ws = get_ignore_ws(fileid, GET)
127 127 ln_ctx = (get_line_ctx(fileid, GET) or 3) * 2
128 128
129 129 params = defaultdict(list)
130 130 _update_with_GET(params, GET)
131 131
132 132 if ln_ctx > 0:
133 133 params['context'] += [ln_ctx]
134 134
135 135 if ig_ws:
136 136 ig_ws_key = 'ignorews'
137 137 ig_ws_val = 1
138 138 params[ig_ws_key] += [ig_ws_val]
139 139
140 140 lbl = _('Increase context')
141 141 tooltiplbl = _('Increase context for all diffs')
142 142
143 143 if fileid:
144 144 params['anchor'] = 'a_' + fileid
145 145 return h.link_to(lbl, h.url.current(**params), title=tooltiplbl, class_='tooltip')
146 146
147 147
148 148 class ChangesetController(BaseRepoController):
149 149
150 150 def __before__(self):
151 151 super(ChangesetController, self).__before__()
152 152 c.affected_files_cut_off = 60
153 153
154 154 def _index(self, commit_id_range, method):
155 155 c.ignorews_url = _ignorews_url
156 156 c.context_url = _context_url
157 157 c.fulldiff = fulldiff = request.GET.get('fulldiff')
158 158
159 159 # fetch global flags of ignore ws or context lines
160 160 context_lcl = get_line_ctx('', request.GET)
161 161 ign_whitespace_lcl = get_ignore_ws('', request.GET)
162 162
163 163 # diff_limit will cut off the whole diff if the limit is applied
164 164 # otherwise it will just hide the big files from the front-end
165 165 diff_limit = self.cut_off_limit_diff
166 166 file_limit = self.cut_off_limit_file
167 167
168 168 # get ranges of commit ids if present
169 169 commit_range = commit_id_range.split('...')[:2]
170 170
171 171 try:
172 172 pre_load = ['affected_files', 'author', 'branch', 'date',
173 173 'message', 'parents']
174 174
175 175 if len(commit_range) == 2:
176 176 commits = c.rhodecode_repo.get_commits(
177 177 start_id=commit_range[0], end_id=commit_range[1],
178 178 pre_load=pre_load)
179 179 commits = list(commits)
180 180 else:
181 181 commits = [c.rhodecode_repo.get_commit(
182 182 commit_id=commit_id_range, pre_load=pre_load)]
183 183
184 184 c.commit_ranges = commits
185 185 if not c.commit_ranges:
186 186 raise RepositoryError(
187 187 'The commit range returned an empty result')
188 188 except CommitDoesNotExistError:
189 189 msg = _('No such commit exists for this repository')
190 190 h.flash(msg, category='error')
191 191 raise HTTPNotFound()
192 192 except Exception:
193 193 log.exception("General failure")
194 194 raise HTTPNotFound()
195 195
196 196 c.changes = OrderedDict()
197 197 c.lines_added = 0
198 198 c.lines_deleted = 0
199 199
200 200 # auto collapse if we have more commits than the limit
201 201 collapse_limit = diffs.DiffProcessor._collapse_commits_over
202 202 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
203 203
204 204 c.commit_statuses = ChangesetStatus.STATUSES
205 205 c.inline_comments = []
206 206 c.files = []
207 207
208 208 c.statuses = []
209 209 c.comments = []
210 210 c.unresolved_comments = []
211 211 if len(c.commit_ranges) == 1:
212 212 commit = c.commit_ranges[0]
213 213 c.comments = CommentsModel().get_comments(
214 214 c.rhodecode_db_repo.repo_id,
215 215 revision=commit.raw_id)
216 216 c.statuses.append(ChangesetStatusModel().get_status(
217 217 c.rhodecode_db_repo.repo_id, commit.raw_id))
218 218 # comments from PR
219 219 statuses = ChangesetStatusModel().get_statuses(
220 220 c.rhodecode_db_repo.repo_id, commit.raw_id,
221 221 with_revisions=True)
222 222 prs = set(st.pull_request for st in statuses
223 223 if st.pull_request is not None)
224 224 # from associated statuses, check the pull requests, and
225 225 # show comments from them
226 226 for pr in prs:
227 227 c.comments.extend(pr.comments)
228 228
229 229 c.unresolved_comments = CommentsModel()\
230 230 .get_commit_unresolved_todos(commit.raw_id)
231 231
232 232 # Iterate over ranges (default commit view is always one commit)
233 233 for commit in c.commit_ranges:
234 234 c.changes[commit.raw_id] = []
235 235
236 236 commit2 = commit
237 237 commit1 = commit.parents[0] if commit.parents else EmptyCommit()
238 238
239 239 _diff = c.rhodecode_repo.get_diff(
240 240 commit1, commit2,
241 241 ignore_whitespace=ign_whitespace_lcl, context=context_lcl)
242 242 diff_processor = diffs.DiffProcessor(
243 243 _diff, format='newdiff', diff_limit=diff_limit,
244 244 file_limit=file_limit, show_full_diff=fulldiff)
245 245
246 246 commit_changes = OrderedDict()
247 247 if method == 'show':
248 248 _parsed = diff_processor.prepare()
249 249 c.limited_diff = isinstance(_parsed, diffs.LimitedDiffContainer)
250 250
251 251 _parsed = diff_processor.prepare()
252 252
253 253 def _node_getter(commit):
254 254 def get_node(fname):
255 255 try:
256 256 return commit.get_node(fname)
257 257 except NodeDoesNotExistError:
258 258 return None
259 259 return get_node
260 260
261 261 inline_comments = CommentsModel().get_inline_comments(
262 262 c.rhodecode_db_repo.repo_id, revision=commit.raw_id)
263 263 c.inline_cnt = CommentsModel().get_inline_comments_count(
264 264 inline_comments)
265 265
266 266 diffset = codeblocks.DiffSet(
267 267 repo_name=c.repo_name,
268 268 source_node_getter=_node_getter(commit1),
269 269 target_node_getter=_node_getter(commit2),
270 270 comments=inline_comments
271 271 ).render_patchset(_parsed, commit1.raw_id, commit2.raw_id)
272 272 c.changes[commit.raw_id] = diffset
273 273 else:
274 274 # for downloads/raw we only need the RAW diff, nothing else
275 275 diff = diff_processor.as_raw()
276 276 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
277 277
278 278 # sort comments by how they were generated
279 279 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
280 280
281 281 if len(c.commit_ranges) == 1:
282 282 c.commit = c.commit_ranges[0]
283 283 c.parent_tmpl = ''.join(
284 284 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
285 285 if method == 'download':
286 286 response.content_type = 'text/plain'
287 287 response.content_disposition = (
288 288 'attachment; filename=%s.diff' % commit_id_range[:12])
289 289 return diff
290 290 elif method == 'patch':
291 291 response.content_type = 'text/plain'
292 292 c.diff = safe_unicode(diff)
293 293 return render('changeset/patch_changeset.mako')
294 294 elif method == 'raw':
295 295 response.content_type = 'text/plain'
296 296 return diff
297 297 elif method == 'show':
298 298 if len(c.commit_ranges) == 1:
299 299 return render('changeset/changeset.mako')
300 300 else:
301 301 c.ancestor = None
302 302 c.target_repo = c.rhodecode_db_repo
303 303 return render('changeset/changeset_range.mako')
304 304
305 305 @LoginRequired()
306 306 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
307 307 'repository.admin')
308 308 def index(self, revision, method='show'):
309 309 return self._index(revision, method=method)
310 310
311 311 @LoginRequired()
312 312 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
313 313 'repository.admin')
314 314 def changeset_raw(self, revision):
315 315 return self._index(revision, method='raw')
316 316
317 317 @LoginRequired()
318 318 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
319 319 'repository.admin')
320 320 def changeset_patch(self, revision):
321 321 return self._index(revision, method='patch')
322 322
323 323 @LoginRequired()
324 324 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
325 325 'repository.admin')
326 326 def changeset_download(self, revision):
327 327 return self._index(revision, method='download')
328 328
329 329 @LoginRequired()
330 330 @NotAnonymous()
331 331 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
332 332 'repository.admin')
333 333 @auth.CSRFRequired()
334 334 @jsonify
335 335 def comment(self, repo_name, revision):
336 336 commit_id = revision
337 337 status = request.POST.get('changeset_status', None)
338 338 text = request.POST.get('text')
339 339 comment_type = request.POST.get('comment_type')
340 340 resolves_comment_id = request.POST.get('resolves_comment_id', None)
341 341
342 342 if status:
343 343 text = text or (_('Status change %(transition_icon)s %(status)s')
344 344 % {'transition_icon': '>',
345 345 'status': ChangesetStatus.get_status_lbl(status)})
346 346
347 347 multi_commit_ids = []
348 348 for _commit_id in request.POST.get('commit_ids', '').split(','):
349 349 if _commit_id not in ['', None, EmptyCommit.raw_id]:
350 350 if _commit_id not in multi_commit_ids:
351 351 multi_commit_ids.append(_commit_id)
352 352
353 353 commit_ids = multi_commit_ids or [commit_id]
354 354
355 355 comment = None
356 356 for current_id in filter(None, commit_ids):
357 357 c.co = comment = CommentsModel().create(
358 358 text=text,
359 359 repo=c.rhodecode_db_repo.repo_id,
360 360 user=c.rhodecode_user.user_id,
361 361 commit_id=current_id,
362 362 f_path=request.POST.get('f_path'),
363 363 line_no=request.POST.get('line'),
364 364 status_change=(ChangesetStatus.get_status_lbl(status)
365 365 if status else None),
366 366 status_change_type=status,
367 367 comment_type=comment_type,
368 368 resolves_comment_id=resolves_comment_id
369 369 )
370 370
371 371 # get status if set !
372 372 if status:
373 373 # if the latest status came from a pull request and that pull request
374 374 # is closed, disallow changing the status!
375 375 # dont_allow_on_closed_pull_request = True !
376 376
377 377 try:
378 378 ChangesetStatusModel().set_status(
379 379 c.rhodecode_db_repo.repo_id,
380 380 status,
381 381 c.rhodecode_user.user_id,
382 382 comment,
383 383 revision=current_id,
384 384 dont_allow_on_closed_pull_request=True
385 385 )
386 386 except StatusChangeOnClosedPullRequestError:
387 387 msg = _('Changing the status of a commit associated with '
388 388 'a closed pull request is not allowed')
389 389 log.exception(msg)
390 390 h.flash(msg, category='warning')
391 391 return redirect(h.url(
392 392 'changeset_home', repo_name=repo_name,
393 393 revision=current_id))
394 394
395 395 # finalize, commit and redirect
396 396 Session().commit()
397 397
398 398 data = {
399 399 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
400 400 }
401 401 if comment:
402 402 data.update(comment.get_dict())
403 403 data.update({'rendered_text':
404 404 render('changeset/changeset_comment_block.mako')})
405 405
406 406 return data
407 407
408 408 @LoginRequired()
409 409 @NotAnonymous()
410 410 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
411 411 'repository.admin')
412 412 @auth.CSRFRequired()
413 413 def preview_comment(self):
414 414 # Technically a CSRF token is not needed as no state changes with this
415 415 # call. However, as this is a POST it is better to have it, so automated
416 416 # tools don't flag it as potential CSRF.
417 417 # POST is required because the payload could be bigger than the maximum
418 418 # allowed by GET.
419 419 if not request.environ.get('HTTP_X_PARTIAL_XHR'):
420 420 raise HTTPBadRequest()
421 421 text = request.POST.get('text')
422 422 renderer = request.POST.get('renderer') or 'rst'
423 423 if text:
424 424 return h.render(text, renderer=renderer, mentions=True)
425 425 return ''
426 426
427 427 @LoginRequired()
428 428 @NotAnonymous()
429 429 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
430 430 'repository.admin')
431 431 @auth.CSRFRequired()
432 432 @jsonify
433 433 def delete_comment(self, repo_name, comment_id):
434 434 comment = ChangesetComment.get(comment_id)
435 435 if not comment:
436 436 log.debug('Comment with id:%s not found, skipping', comment_id)
437 437 # the comment was probably already deleted in another call
438 438 return True
439 439
440 440 owner = (comment.author.user_id == c.rhodecode_user.user_id)
441 441 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
442 442 if h.HasPermissionAny('hg.admin')() or is_repo_admin or owner:
443 CommentsModel().delete(comment=comment)
443 CommentsModel().delete(comment=comment, user=c.rhodecode_user)
444 444 Session().commit()
445 445 return True
446 446 else:
447 447 raise HTTPForbidden()
448 448
449 449 @LoginRequired()
450 450 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
451 451 'repository.admin')
452 452 @jsonify
453 453 def changeset_info(self, repo_name, revision):
454 454 if request.is_xhr:
455 455 try:
456 456 return c.rhodecode_repo.get_commit(commit_id=revision)
457 457 except CommitDoesNotExistError as e:
458 458 return EmptyCommit(message=str(e))
459 459 else:
460 460 raise HTTPBadRequest()
461 461
462 462 @LoginRequired()
463 463 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
464 464 'repository.admin')
465 465 @jsonify
466 466 def changeset_children(self, repo_name, revision):
467 467 if request.is_xhr:
468 468 commit = c.rhodecode_repo.get_commit(commit_id=revision)
469 469 result = {"results": commit.children}
470 470 return result
471 471 else:
472 472 raise HTTPBadRequest()
473 473
474 474 @LoginRequired()
475 475 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
476 476 'repository.admin')
477 477 @jsonify
478 478 def changeset_parents(self, repo_name, revision):
479 479 if request.is_xhr:
480 480 commit = c.rhodecode_repo.get_commit(commit_id=revision)
481 481 result = {"results": commit.parents}
482 482 return result
483 483 else:
484 484 raise HTTPBadRequest()
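The `comment` view above builds everything from plain form fields, so creating a status-changing comment on a commit comes down to the POST data sketched below. Only the field names are taken from the `request.POST.get(...)` calls in the view; the values are placeholders, and the assumption that the CSRF token travels as `csrf_token` is the editor's, not something stated in the diff.

# field names mirror the view above; all values are placeholders
comment_form_data = {
    'csrf_token': '<csrf_token>',      # assumed token field for @auth.CSRFRequired()
    'text': 'Looks good, approving.',
    'changeset_status': 'approved',    # optional status transition
    'comment_type': 'note',
    'resolves_comment_id': '',         # id of a TODO comment to resolve, if any
    'commit_ids': '',                  # comma-separated ids for multi-commit comments
    'f_path': '',                      # file path, only for inline comments
    'line': '',                        # line reference, only for inline comments
}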
@@ -1,1110 +1,1110 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Files controller for RhodeCode Enterprise
23 23 """
24 24
25 25 import itertools
26 26 import logging
27 27 import os
28 28 import shutil
29 29 import tempfile
30 30
31 31 from pylons import request, response, tmpl_context as c, url
32 32 from pylons.i18n.translation import _
33 33 from pylons.controllers.util import redirect
34 34 from webob.exc import HTTPNotFound, HTTPBadRequest
35 35
36 36 from rhodecode.controllers.utils import parse_path_ref
37 37 from rhodecode.lib import diffs, helpers as h, caches
38 38 from rhodecode.lib import audit_logger
39 39 from rhodecode.lib.codeblocks import (
40 40 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
41 from rhodecode.lib.utils import jsonify, action_logger
41 from rhodecode.lib.utils import jsonify
42 42 from rhodecode.lib.utils2 import (
43 43 convert_line_endings, detect_mode, safe_str, str2bool)
44 44 from rhodecode.lib.auth import (
45 45 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired, XHRRequired)
46 46 from rhodecode.lib.base import BaseRepoController, render
47 47 from rhodecode.lib.vcs import path as vcspath
48 48 from rhodecode.lib.vcs.backends.base import EmptyCommit
49 49 from rhodecode.lib.vcs.conf import settings
50 50 from rhodecode.lib.vcs.exceptions import (
51 51 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
52 52 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
53 53 NodeDoesNotExistError, CommitError, NodeError)
54 54 from rhodecode.lib.vcs.nodes import FileNode
55 55
56 56 from rhodecode.model.repo import RepoModel
57 57 from rhodecode.model.scm import ScmModel
58 58 from rhodecode.model.db import Repository
59 59
60 60 from rhodecode.controllers.changeset import (
61 61 _ignorews_url, _context_url, get_line_ctx, get_ignore_ws)
62 62 from rhodecode.lib.exceptions import NonRelativePathError
63 63
64 64 log = logging.getLogger(__name__)
65 65
66 66
67 67 class FilesController(BaseRepoController):
68 68
69 69 def __before__(self):
70 70 super(FilesController, self).__before__()
71 71 c.cut_off_limit = self.cut_off_limit_file
72 72
73 73 def _get_default_encoding(self):
74 74 enc_list = getattr(c, 'default_encodings', [])
75 75 return enc_list[0] if enc_list else 'UTF-8'
76 76
77 77 def __get_commit_or_redirect(self, commit_id, repo_name,
78 78 redirect_after=True):
79 79 """
80 80 This is a safe way to get a commit. If an error occurs it redirects to
81 81 tip with a proper message
82 82
83 83 :param commit_id: id of commit to fetch
84 84 :param repo_name: repo name to redirect after
85 85 :param redirect_after: toggle redirection
86 86 """
87 87 try:
88 88 return c.rhodecode_repo.get_commit(commit_id)
89 89 except EmptyRepositoryError:
90 90 if not redirect_after:
91 91 return None
92 92 url_ = url('files_add_home',
93 93 repo_name=c.repo_name,
94 94 revision=0, f_path='', anchor='edit')
95 95 if h.HasRepoPermissionAny(
96 96 'repository.write', 'repository.admin')(c.repo_name):
97 97 add_new = h.link_to(
98 98 _('Click here to add a new file.'),
99 99 url_, class_="alert-link")
100 100 else:
101 101 add_new = ""
102 102 h.flash(h.literal(
103 103 _('There are no files yet. %s') % add_new), category='warning')
104 104 redirect(h.route_path('repo_summary', repo_name=repo_name))
105 105 except (CommitDoesNotExistError, LookupError):
106 106 msg = _('No such commit exists for this repository')
107 107 h.flash(msg, category='error')
108 108 raise HTTPNotFound()
109 109 except RepositoryError as e:
110 110 h.flash(safe_str(e), category='error')
111 111 raise HTTPNotFound()
112 112
113 113 def __get_filenode_or_redirect(self, repo_name, commit, path):
114 114 """
115 115 Returns file_node. If an error occurs or the given path is a directory,
116 116 it redirects to the top level path.
117 117
118 118 :param repo_name: repo_name
119 119 :param commit: given commit
120 120 :param path: path to lookup
121 121 """
122 122 try:
123 123 file_node = commit.get_node(path)
124 124 if file_node.is_dir():
125 125 raise RepositoryError('The given path is a directory')
126 126 except CommitDoesNotExistError:
127 127 msg = _('No such commit exists for this repository')
128 128 log.exception(msg)
129 129 h.flash(msg, category='error')
130 130 raise HTTPNotFound()
131 131 except RepositoryError as e:
132 132 h.flash(safe_str(e), category='error')
133 133 raise HTTPNotFound()
134 134
135 135 return file_node
136 136
137 137 def __get_tree_cache_manager(self, repo_name, namespace_type):
138 138 _namespace = caches.get_repo_namespace_key(namespace_type, repo_name)
139 139 return caches.get_cache_manager('repo_cache_long', _namespace)
140 140
141 141 def _get_tree_at_commit(self, repo_name, commit_id, f_path,
142 142 full_load=False, force=False):
143 143 def _cached_tree():
144 144 log.debug('Generating cached file tree for %s, %s, %s',
145 145 repo_name, commit_id, f_path)
146 146 c.full_load = full_load
147 147 return render('files/files_browser_tree.mako')
148 148
149 149 cache_manager = self.__get_tree_cache_manager(
150 150 repo_name, caches.FILE_TREE)
151 151
152 152 cache_key = caches.compute_key_from_params(
153 153 repo_name, commit_id, f_path)
154 154
155 155 if force:
156 156 # we want to force recompute of caches
157 157 cache_manager.remove_value(cache_key)
158 158
159 159 return cache_manager.get(cache_key, createfunc=_cached_tree)
160 160
161 161 def _get_nodelist_at_commit(self, repo_name, commit_id, f_path):
162 162 def _cached_nodes():
163 163 log.debug('Generating cached nodelist for %s, %s, %s',
164 164 repo_name, commit_id, f_path)
165 165 _d, _f = ScmModel().get_nodes(
166 166 repo_name, commit_id, f_path, flat=False)
167 167 return _d + _f
168 168
169 169 cache_manager = self.__get_tree_cache_manager(
170 170 repo_name, caches.FILE_SEARCH_TREE_META)
171 171
172 172 cache_key = caches.compute_key_from_params(
173 173 repo_name, commit_id, f_path)
174 174 return cache_manager.get(cache_key, createfunc=_cached_nodes)
175 175
176 176 @LoginRequired()
177 177 @HasRepoPermissionAnyDecorator(
178 178 'repository.read', 'repository.write', 'repository.admin')
179 179 def index(
180 180 self, repo_name, revision, f_path, annotate=False, rendered=False):
181 181 commit_id = revision
182 182
183 183 # redirect to given commit_id from form if given
184 184 get_commit_id = request.GET.get('at_rev', None)
185 185 if get_commit_id:
186 186 self.__get_commit_or_redirect(get_commit_id, repo_name)
187 187
188 188 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
189 189 c.branch = request.GET.get('branch', None)
190 190 c.f_path = f_path
191 191 c.annotate = annotate
192 192 # default is False, but .rst/.md files are autorendered later; we can
193 193 # override autorendering by setting this GET flag
194 194 c.renderer = rendered or not request.GET.get('no-render', False)
195 195
196 196 # prev link
197 197 try:
198 198 prev_commit = c.commit.prev(c.branch)
199 199 c.prev_commit = prev_commit
200 200 c.url_prev = url('files_home', repo_name=c.repo_name,
201 201 revision=prev_commit.raw_id, f_path=f_path)
202 202 if c.branch:
203 203 c.url_prev += '?branch=%s' % c.branch
204 204 except (CommitDoesNotExistError, VCSError):
205 205 c.url_prev = '#'
206 206 c.prev_commit = EmptyCommit()
207 207
208 208 # next link
209 209 try:
210 210 next_commit = c.commit.next(c.branch)
211 211 c.next_commit = next_commit
212 212 c.url_next = url('files_home', repo_name=c.repo_name,
213 213 revision=next_commit.raw_id, f_path=f_path)
214 214 if c.branch:
215 215 c.url_next += '?branch=%s' % c.branch
216 216 except (CommitDoesNotExistError, VCSError):
217 217 c.url_next = '#'
218 218 c.next_commit = EmptyCommit()
219 219
220 220 # files or dirs
221 221 try:
222 222 c.file = c.commit.get_node(f_path)
223 223 c.file_author = True
224 224 c.file_tree = ''
225 225 if c.file.is_file():
226 226 c.lf_node = c.file.get_largefile_node()
227 227
228 228 c.file_source_page = 'true'
229 229 c.file_last_commit = c.file.last_commit
230 230 if c.file.size < self.cut_off_limit_file:
231 231 if c.annotate: # annotation has precedence over renderer
232 232 c.annotated_lines = filenode_as_annotated_lines_tokens(
233 233 c.file
234 234 )
235 235 else:
236 236 c.renderer = (
237 237 c.renderer and h.renderer_from_filename(c.file.path)
238 238 )
239 239 if not c.renderer:
240 240 c.lines = filenode_as_lines_tokens(c.file)
241 241
242 242 c.on_branch_head = self._is_valid_head(
243 243 commit_id, c.rhodecode_repo)
244 244
245 245 branch = c.commit.branch if (
246 246 c.commit.branch and '/' not in c.commit.branch) else None
247 247 c.branch_or_raw_id = branch or c.commit.raw_id
248 248 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
249 249
250 250 author = c.file_last_commit.author
251 251 c.authors = [(h.email(author),
252 252 h.person(author, 'username_or_name_or_email'))]
253 253 else:
254 254 c.file_source_page = 'false'
255 255 c.authors = []
256 256 c.file_tree = self._get_tree_at_commit(
257 257 repo_name, c.commit.raw_id, f_path)
258 258
259 259 except RepositoryError as e:
260 260 h.flash(safe_str(e), category='error')
261 261 raise HTTPNotFound()
262 262
263 263 if request.environ.get('HTTP_X_PJAX'):
264 264 return render('files/files_pjax.mako')
265 265
266 266 return render('files/files.mako')
267 267
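The rendering switches above interact in a fixed order: annotation wins over markup rendering, and rendering itself can be vetoed via the 'no-render' GET flag. A minimal sketch of that precedence, using a hypothetical helper name that is not part of this controller:

def resolve_render_mode(get_params, annotate=False, rendered=False):
    # annotation has precedence over any renderer
    if annotate:
        return 'annotate'
    # rendering is on unless explicitly disabled via ?no-render=1; the real
    # view additionally checks whether the file type has a markup renderer
    if rendered or not get_params.get('no-render', False):
        return 'rendered'
    return 'source'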
268 268 @LoginRequired()
269 269 @HasRepoPermissionAnyDecorator(
270 270 'repository.read', 'repository.write', 'repository.admin')
271 271 def annotate_previous(self, repo_name, revision, f_path):
272 272
273 273 commit_id = revision
274 274 commit = self.__get_commit_or_redirect(commit_id, repo_name)
275 275 prev_commit_id = commit.raw_id
276 276
277 277 f_path = f_path
278 278 is_file = False
279 279 try:
280 280 _file = commit.get_node(f_path)
281 281 is_file = _file.is_file()
282 282 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
283 283 pass
284 284
285 285 if is_file:
286 286 history = commit.get_file_history(f_path)
287 287 prev_commit_id = history[1].raw_id \
288 288 if len(history) > 1 else prev_commit_id
289 289
290 290 return redirect(h.url(
291 291 'files_annotate_home', repo_name=repo_name,
292 292 revision=prev_commit_id, f_path=f_path))
293 293
294 294 @LoginRequired()
295 295 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
296 296 'repository.admin')
297 297 @jsonify
298 298 def history(self, repo_name, revision, f_path):
299 299 commit = self.__get_commit_or_redirect(revision, repo_name)
300 300 f_path = f_path
301 301 _file = commit.get_node(f_path)
302 302 if _file.is_file():
303 303 file_history, _hist = self._get_node_history(commit, f_path)
304 304
305 305 res = []
306 306 for obj in file_history:
307 307 res.append({
308 308 'text': obj[1],
309 309 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
310 310 })
311 311
312 312 data = {
313 313 'more': False,
314 314 'results': res
315 315 }
316 316 return data
317 317
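The structure handed back to the history dropdown is easiest to see as a literal; the ids and labels below are fabricated, only the shape matches what the code above builds:

history_response_example = {
    'more': False,
    'results': [
        {'text': 'Changesets',
         'children': [{'id': 'a1b2c3d4...', 'text': 'r42:a1b2c3d4 (default)'}]},
        {'text': 'Branches',
         'children': [{'id': 'deadbeef...', 'text': 'default'}]},
        {'text': 'Tags', 'children': []},
    ],
}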
318 318 @LoginRequired()
319 319 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
320 320 'repository.admin')
321 321 def authors(self, repo_name, revision, f_path):
322 322 commit = self.__get_commit_or_redirect(revision, repo_name)
323 323 file_node = commit.get_node(f_path)
324 324 if file_node.is_file():
325 325 c.file_last_commit = file_node.last_commit
326 326 if request.GET.get('annotate') == '1':
327 327 # use _hist from annotation if annotation mode is on
328 328 commit_ids = set(x[1] for x in file_node.annotate)
329 329 _hist = (
330 330 c.rhodecode_repo.get_commit(commit_id)
331 331 for commit_id in commit_ids)
332 332 else:
333 333 _f_history, _hist = self._get_node_history(commit, f_path)
334 334 c.file_author = False
335 335 c.authors = []
336 336 for author in set(commit.author for commit in _hist):
337 337 c.authors.append((
338 338 h.email(author),
339 339 h.person(author, 'username_or_name_or_email')))
340 340 return render('files/file_authors_box.mako')
341 341
342 342 @LoginRequired()
343 343 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
344 344 'repository.admin')
345 345 def rawfile(self, repo_name, revision, f_path):
346 346 """
347 347 Action for download as raw
348 348 """
349 349 commit = self.__get_commit_or_redirect(revision, repo_name)
350 350 file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
351 351
352 352 if request.GET.get('lf'):
353 353             # only when the 'lf' GET flag is passed do we download this file
354 354             # as an LFS/largefile
355 355 lf_node = file_node.get_largefile_node()
356 356 if lf_node:
357 357 # overwrite our pointer with the REAL large-file
358 358 file_node = lf_node
359 359
360 360 response.content_disposition = 'attachment; filename=%s' % \
361 361 safe_str(f_path.split(Repository.NAME_SEP)[-1])
362 362
363 363 response.content_type = file_node.mimetype
364 364 charset = self._get_default_encoding()
365 365 if charset:
366 366 response.charset = charset
367 367
368 368 return file_node.content
369 369
370 370 @LoginRequired()
371 371 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
372 372 'repository.admin')
373 373 def raw(self, repo_name, revision, f_path):
374 374 """
375 375         Action for show as raw; some mimetypes, such as images and icons,
376 376         are rendered inline instead of being offered as a download.
377 377 """
378 378 commit = self.__get_commit_or_redirect(revision, repo_name)
379 379 file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)
380 380
381 381 raw_mimetype_mapping = {
382 382 # map original mimetype to a mimetype used for "show as raw"
383 383 # you can also provide a content-disposition to override the
384 384 # default "attachment" disposition.
385 385 # orig_type: (new_type, new_dispo)
386 386
387 387 # show images inline:
388 388 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
389 389 # for example render an SVG with javascript inside or even render
390 390 # HTML.
391 391 'image/x-icon': ('image/x-icon', 'inline'),
392 392 'image/png': ('image/png', 'inline'),
393 393 'image/gif': ('image/gif', 'inline'),
394 394 'image/jpeg': ('image/jpeg', 'inline'),
395 395 'application/pdf': ('application/pdf', 'inline'),
396 396 }
397 397
398 398 mimetype = file_node.mimetype
399 399 try:
400 400 mimetype, dispo = raw_mimetype_mapping[mimetype]
401 401 except KeyError:
402 402 # we don't know anything special about this, handle it safely
403 403 if file_node.is_binary:
404 404 # do same as download raw for binary files
405 405 mimetype, dispo = 'application/octet-stream', 'attachment'
406 406 else:
407 407 # do not just use the original mimetype, but force text/plain,
408 408 # otherwise it would serve text/html and that might be unsafe.
409 409                 # Note: the underlying vcs library fakes a text/plain mimetype
410 410                 # if the mimetype cannot be determined and it thinks the content
411 411                 # is not binary. This might lead to erroneous text display in
412 412                 # some cases, but helps in other cases, like with text files
413 413                 # without an extension.
414 414 mimetype, dispo = 'text/plain', 'inline'
415 415
416 416 if dispo == 'attachment':
417 417 dispo = 'attachment; filename=%s' % safe_str(
418 418 f_path.split(os.sep)[-1])
419 419
420 420 response.content_disposition = dispo
421 421 response.content_type = mimetype
422 422 charset = self._get_default_encoding()
423 423 if charset:
424 424 response.charset = charset
425 425 return file_node.content
426 426
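Distilled from the mapping and the fallback branches above, this is roughly how the served mimetype and Content-Disposition are chosen; the helper below is an illustration, not the actual code path:

def resolve_raw_headers(mimetype, is_binary, whitelist):
    # whitelist maps original mimetype -> (served mimetype, disposition),
    # mirroring raw_mimetype_mapping above
    if mimetype in whitelist:
        return whitelist[mimetype]
    if is_binary:
        # unknown binary content is always offered as a download
        return 'application/octet-stream', 'attachment'
    # unknown text is forced to text/plain so it can never render as HTML
    return 'text/plain', 'inline'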
427 427 @CSRFRequired()
428 428 @LoginRequired()
429 429 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
430 430 def delete(self, repo_name, revision, f_path):
431 431 commit_id = revision
432 432
433 433 repo = c.rhodecode_db_repo
434 434 if repo.enable_locking and repo.locked[0]:
435 435 h.flash(_('This repository has been locked by %s on %s')
436 436 % (h.person_by_id(repo.locked[0]),
437 437 h.format_date(h.time_to_datetime(repo.locked[1]))),
438 438 'warning')
439 439 return redirect(h.url('files_home',
440 440 repo_name=repo_name, revision='tip'))
441 441
442 442 if not self._is_valid_head(commit_id, repo.scm_instance()):
443 443             h.flash(_('You can only delete files when the given revision '
444 444                       'is a valid branch head'), category='warning')
445 445 return redirect(h.url('files_home',
446 446 repo_name=repo_name, revision='tip',
447 447 f_path=f_path))
448 448
449 449 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
450 450 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
451 451
452 452 c.default_message = _(
453 453 'Deleted file %s via RhodeCode Enterprise') % (f_path)
454 454 c.f_path = f_path
455 455 node_path = f_path
456 456 author = c.rhodecode_user.full_contact
457 457 message = request.POST.get('message') or c.default_message
458 458 try:
459 459 nodes = {
460 460 node_path: {
461 461 'content': ''
462 462 }
463 463 }
464 464 self.scm_model.delete_nodes(
465 465 user=c.rhodecode_user.user_id, repo=c.rhodecode_db_repo,
466 466 message=message,
467 467 nodes=nodes,
468 468 parent_commit=c.commit,
469 469 author=author,
470 470 )
471 471
472 472 h.flash(_('Successfully deleted file %s') % f_path,
473 473 category='success')
474 474 except Exception:
475 475 msg = _('Error occurred during commit')
476 476 log.exception(msg)
477 477 h.flash(msg, category='error')
478 478 return redirect(url('changeset_home',
479 479 repo_name=c.repo_name, revision='tip'))
480 480
481 481 @LoginRequired()
482 482 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
483 483 def delete_home(self, repo_name, revision, f_path):
484 484 commit_id = revision
485 485
486 486 repo = c.rhodecode_db_repo
487 487 if repo.enable_locking and repo.locked[0]:
488 488 h.flash(_('This repository has been locked by %s on %s')
489 489 % (h.person_by_id(repo.locked[0]),
490 490 h.format_date(h.time_to_datetime(repo.locked[1]))),
491 491 'warning')
492 492 return redirect(h.url('files_home',
493 493 repo_name=repo_name, revision='tip'))
494 494
495 495 if not self._is_valid_head(commit_id, repo.scm_instance()):
496 496             h.flash(_('You can only delete files when the given revision '
497 497                       'is a valid branch head'), category='warning')
498 498 return redirect(h.url('files_home',
499 499 repo_name=repo_name, revision='tip',
500 500 f_path=f_path))
501 501
502 502 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
503 503 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
504 504
505 505 c.default_message = _(
506 506 'Deleted file %s via RhodeCode Enterprise') % (f_path)
507 507 c.f_path = f_path
508 508
509 509 return render('files/files_delete.mako')
510 510
511 511 @CSRFRequired()
512 512 @LoginRequired()
513 513 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
514 514 def edit(self, repo_name, revision, f_path):
515 515 commit_id = revision
516 516
517 517 repo = c.rhodecode_db_repo
518 518 if repo.enable_locking and repo.locked[0]:
519 519 h.flash(_('This repository has been locked by %s on %s')
520 520 % (h.person_by_id(repo.locked[0]),
521 521 h.format_date(h.time_to_datetime(repo.locked[1]))),
522 522 'warning')
523 523 return redirect(h.url('files_home',
524 524 repo_name=repo_name, revision='tip'))
525 525
526 526 if not self._is_valid_head(commit_id, repo.scm_instance()):
527 527             h.flash(_('You can only edit files when the given revision '
528 528                       'is a valid branch head'), category='warning')
529 529 return redirect(h.url('files_home',
530 530 repo_name=repo_name, revision='tip',
531 531 f_path=f_path))
532 532
533 533 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
534 534 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
535 535
536 536 if c.file.is_binary:
537 537 return redirect(url('files_home', repo_name=c.repo_name,
538 538 revision=c.commit.raw_id, f_path=f_path))
539 539 c.default_message = _(
540 540 'Edited file %s via RhodeCode Enterprise') % (f_path)
541 541 c.f_path = f_path
542 542 old_content = c.file.content
543 543 sl = old_content.splitlines(1)
544 544 first_line = sl[0] if sl else ''
545 545
546 546 # modes: 0 - Unix, 1 - Mac, 2 - DOS
547 547 mode = detect_mode(first_line, 0)
548 548 content = convert_line_endings(request.POST.get('content', ''), mode)
549 549
550 550 message = request.POST.get('message') or c.default_message
551 551 org_f_path = c.file.unicode_path
552 552 filename = request.POST['filename']
553 553 org_filename = c.file.name
554 554
555 555 if content == old_content and filename == org_filename:
556 556 h.flash(_('No changes'), category='warning')
557 557 return redirect(url('changeset_home', repo_name=c.repo_name,
558 558 revision='tip'))
559 559 try:
560 560 mapping = {
561 561 org_f_path: {
562 562 'org_filename': org_f_path,
563 563 'filename': os.path.join(c.file.dir_path, filename),
564 564 'content': content,
565 565 'lexer': '',
566 566 'op': 'mod',
567 567 }
568 568 }
569 569
570 570 ScmModel().update_nodes(
571 571 user=c.rhodecode_user.user_id,
572 572 repo=c.rhodecode_db_repo,
573 573 message=message,
574 574 nodes=mapping,
575 575 parent_commit=c.commit,
576 576 )
577 577
578 578 h.flash(_('Successfully committed to %s') % f_path,
579 579 category='success')
580 580 except Exception:
581 581 msg = _('Error occurred during commit')
582 582 log.exception(msg)
583 583 h.flash(msg, category='error')
584 584 return redirect(url('changeset_home',
585 585 repo_name=c.repo_name, revision='tip'))
586 586
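The `detect_mode`/`convert_line_endings` pair used above keeps an edited file on its original line-ending convention. A rough, simplified sketch of the idea, not the actual implementation of those helpers:

def guess_line_ending_mode(first_line, default=0):
    # 0 - Unix (\n), 1 - Mac (\r), 2 - DOS (\r\n), matching the comment above
    if first_line.endswith('\r\n'):
        return 2
    if first_line.endswith('\r'):
        return 1
    if first_line.endswith('\n'):
        return 0
    return default

def apply_line_endings(text, mode):
    # normalize the submitted content to the detected convention
    # (trailing-newline handling is simplified in this sketch)
    ending = {0: '\n', 1: '\r', 2: '\r\n'}[mode]
    return ending.join(text.splitlines())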
587 587 @LoginRequired()
588 588 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
589 589 def edit_home(self, repo_name, revision, f_path):
590 590 commit_id = revision
591 591
592 592 repo = c.rhodecode_db_repo
593 593 if repo.enable_locking and repo.locked[0]:
594 594 h.flash(_('This repository has been locked by %s on %s')
595 595 % (h.person_by_id(repo.locked[0]),
596 596 h.format_date(h.time_to_datetime(repo.locked[1]))),
597 597 'warning')
598 598 return redirect(h.url('files_home',
599 599 repo_name=repo_name, revision='tip'))
600 600
601 601 if not self._is_valid_head(commit_id, repo.scm_instance()):
602 602             h.flash(_('You can only edit files when the given revision '
603 603                       'is a valid branch head'), category='warning')
604 604 return redirect(h.url('files_home',
605 605 repo_name=repo_name, revision='tip',
606 606 f_path=f_path))
607 607
608 608 c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
609 609 c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)
610 610
611 611 if c.file.is_binary:
612 612 return redirect(url('files_home', repo_name=c.repo_name,
613 613 revision=c.commit.raw_id, f_path=f_path))
614 614 c.default_message = _(
615 615 'Edited file %s via RhodeCode Enterprise') % (f_path)
616 616 c.f_path = f_path
617 617
618 618 return render('files/files_edit.mako')
619 619
620 620 def _is_valid_head(self, commit_id, repo):
621 621         # check if commit is a branch identifier - basically we cannot
622 622         # create multiple heads via file editing
623 623 valid_heads = repo.branches.keys() + repo.branches.values()
624 624
625 625 if h.is_svn(repo) and not repo.is_empty():
626 626             # Note: Subversion only has one head; we add it here in case no
627 627             # branch matched.
628 628 valid_heads.append(repo.get_commit(commit_idx=-1).raw_id)
629 629
630 630 # check if commit is a branch name or branch hash
631 631 return commit_id in valid_heads
632 632
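In practice the head check above accepts either a branch name or the commit hash a branch currently points at; a usage sketch with made-up values:

# Illustrative values only; assume repo.branches == {'default': 'f00dbabe...'}
# self._is_valid_head('default', repo)       -> True   (branch name)
# self._is_valid_head('f00dbabe...', repo)   -> True   (current branch head)
# self._is_valid_head('0ldc0mm17...', repo)  -> False  (would create a new head)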
633 633 @CSRFRequired()
634 634 @LoginRequired()
635 635 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
636 636 def add(self, repo_name, revision, f_path):
637 637 repo = Repository.get_by_repo_name(repo_name)
638 638 if repo.enable_locking and repo.locked[0]:
639 639 h.flash(_('This repository has been locked by %s on %s')
640 640 % (h.person_by_id(repo.locked[0]),
641 641 h.format_date(h.time_to_datetime(repo.locked[1]))),
642 642 'warning')
643 643 return redirect(h.url('files_home',
644 644 repo_name=repo_name, revision='tip'))
645 645
646 646 r_post = request.POST
647 647
648 648 c.commit = self.__get_commit_or_redirect(
649 649 revision, repo_name, redirect_after=False)
650 650 if c.commit is None:
651 651 c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
652 652 c.default_message = (_('Added file via RhodeCode Enterprise'))
653 653 c.f_path = f_path
654 654 unix_mode = 0
655 655 content = convert_line_endings(r_post.get('content', ''), unix_mode)
656 656
657 657 message = r_post.get('message') or c.default_message
658 658 filename = r_post.get('filename')
659 659 location = r_post.get('location', '') # dir location
660 660 file_obj = r_post.get('upload_file', None)
661 661
662 662 if file_obj is not None and hasattr(file_obj, 'filename'):
663 663 filename = r_post.get('filename_upload')
664 664 content = file_obj.file
665 665
666 666 if hasattr(content, 'file'):
667 667                 # non-POSIX systems store the real file under the 'file' attr
668 668 content = content.file
669 669
670 670 # If there's no commit, redirect to repo summary
671 671 if type(c.commit) is EmptyCommit:
672 672 redirect_url = h.route_path('repo_summary', repo_name=c.repo_name)
673 673 else:
674 674 redirect_url = url("changeset_home", repo_name=c.repo_name,
675 675 revision='tip')
676 676
677 677 if not filename:
678 678 h.flash(_('No filename'), category='warning')
679 679 return redirect(redirect_url)
680 680
681 681         # extract the location from the filename; this allows using the
682 682         # foo/bar.txt syntax to create subdirectories
683 683 subdir_loc = filename.rsplit('/', 1)
684 684 if len(subdir_loc) == 2:
685 685 location = os.path.join(location, subdir_loc[0])
686 686
687 687         # strip any path components from the filename, keep just the basename
688 688 filename = os.path.basename(filename)
689 689 node_path = os.path.join(location, filename)
690 690 author = c.rhodecode_user.full_contact
691 691
692 692 try:
693 693 nodes = {
694 694 node_path: {
695 695 'content': content
696 696 }
697 697 }
698 698 self.scm_model.create_nodes(
699 699 user=c.rhodecode_user.user_id,
700 700 repo=c.rhodecode_db_repo,
701 701 message=message,
702 702 nodes=nodes,
703 703 parent_commit=c.commit,
704 704 author=author,
705 705 )
706 706
707 707 h.flash(_('Successfully committed to %s') % node_path,
708 708 category='success')
709 709 except NonRelativePathError as e:
710 710 h.flash(_(
711 711 'The location specified must be a relative path and must not '
712 712 'contain .. in the path'), category='warning')
713 713 return redirect(url('changeset_home', repo_name=c.repo_name,
714 714 revision='tip'))
715 715 except (NodeError, NodeAlreadyExistsError) as e:
716 716 h.flash(_(e), category='error')
717 717 except Exception:
718 718 msg = _('Error occurred during commit')
719 719 log.exception(msg)
720 720 h.flash(msg, category='error')
721 721 return redirect(url('changeset_home',
722 722 repo_name=c.repo_name, revision='tip'))
723 723
724 724 @LoginRequired()
725 725 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
726 726 def add_home(self, repo_name, revision, f_path):
727 727
728 728 repo = Repository.get_by_repo_name(repo_name)
729 729 if repo.enable_locking and repo.locked[0]:
730 730 h.flash(_('This repository has been locked by %s on %s')
731 731 % (h.person_by_id(repo.locked[0]),
732 732 h.format_date(h.time_to_datetime(repo.locked[1]))),
733 733 'warning')
734 734 return redirect(h.url('files_home',
735 735 repo_name=repo_name, revision='tip'))
736 736
737 737 c.commit = self.__get_commit_or_redirect(
738 738 revision, repo_name, redirect_after=False)
739 739 if c.commit is None:
740 740 c.commit = EmptyCommit(alias=c.rhodecode_repo.alias)
741 741 c.default_message = (_('Added file via RhodeCode Enterprise'))
742 742 c.f_path = f_path
743 743
744 744 return render('files/files_add.mako')
745 745
746 746 @LoginRequired()
747 747 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
748 748 'repository.admin')
749 749 def archivefile(self, repo_name, fname):
750 750 fileformat = None
751 751 commit_id = None
752 752 ext = None
753 753 subrepos = request.GET.get('subrepos') == 'true'
754 754
755 755 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
756 756 archive_spec = fname.split(ext_data[1])
757 757 if len(archive_spec) == 2 and archive_spec[1] == '':
758 758 fileformat = a_type or ext_data[1]
759 759 commit_id = archive_spec[0]
760 760 ext = ext_data[1]
761 761
762 762 dbrepo = RepoModel().get_by_repo_name(repo_name)
763 763 if not dbrepo.enable_downloads:
764 764 return _('Downloads disabled')
765 765
766 766 try:
767 767 commit = c.rhodecode_repo.get_commit(commit_id)
768 768 content_type = settings.ARCHIVE_SPECS[fileformat][0]
769 769 except CommitDoesNotExistError:
770 770 return _('Unknown revision %s') % commit_id
771 771 except EmptyRepositoryError:
772 772 return _('Empty repository')
773 773 except KeyError:
774 774 return _('Unknown archive type')
775 775
776 776 # archive cache
777 777 from rhodecode import CONFIG
778 778
779 779 archive_name = '%s-%s%s%s' % (
780 780 safe_str(repo_name.replace('/', '_')),
781 781 '-sub' if subrepos else '',
782 782 safe_str(commit.short_id), ext)
783 783
784 784 use_cached_archive = False
785 785 archive_cache_enabled = CONFIG.get(
786 786 'archive_cache_dir') and not request.GET.get('no_cache')
787 787
788 788 if archive_cache_enabled:
789 789             # check if it's ok to write to the archive cache dir
790 790 if not os.path.isdir(CONFIG['archive_cache_dir']):
791 791 os.makedirs(CONFIG['archive_cache_dir'])
792 792 cached_archive_path = os.path.join(
793 793 CONFIG['archive_cache_dir'], archive_name)
794 794 if os.path.isfile(cached_archive_path):
795 795 log.debug('Found cached archive in %s', cached_archive_path)
796 796 fd, archive = None, cached_archive_path
797 797 use_cached_archive = True
798 798 else:
799 799 log.debug('Archive %s is not yet cached', archive_name)
800 800
801 801 if not use_cached_archive:
802 802 # generate new archive
803 803 fd, archive = tempfile.mkstemp()
804 804             log.debug('Creating new temp archive in %s', archive)
805 805 try:
806 806 commit.archive_repo(archive, kind=fileformat, subrepos=subrepos)
807 807 except ImproperArchiveTypeError:
808 808 return _('Unknown archive type')
809 809 if archive_cache_enabled:
810 810                 # if we generated the archive and caching is enabled,
811 811                 # keep it around for future requests
812 812                 log.debug('Storing new archive in %s', cached_archive_path)
813 813 shutil.move(archive, cached_archive_path)
814 814 archive = cached_archive_path
815 815
816 816 # store download action
817 817 audit_logger.store_web(
818 818 action='repo.archive.download',
819 819 action_data={'user_agent': request.user_agent,
820 820 'archive_name': archive_name,
821 821 'archive_spec': fname,
822 822 'archive_cached': use_cached_archive},
823 823 user=c.rhodecode_user,
824 824 repo=dbrepo,
825 825 commit=True
826 826 )
827 827
828 828 response.content_disposition = str(
829 829 'attachment; filename=%s' % archive_name)
830 830 response.content_type = str(content_type)
831 831
832 832 def get_chunked_archive(archive):
833 833 with open(archive, 'rb') as stream:
834 834 while True:
835 835 data = stream.read(16 * 1024)
836 836 if not data:
837 837 if fd: # fd means we used temporary file
838 838 os.close(fd)
839 839 if not archive_cache_enabled:
840 840 log.debug('Destroying temp archive %s', archive)
841 841 os.remove(archive)
842 842 break
843 843 yield data
844 844
845 845 return get_chunked_archive(archive)
846 846
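Archive caching above is keyed purely by the generated file name, so repeated downloads of the same commit reuse the file already on disk. A compact sketch of how that name and cache check could look in isolation (directory and ids are placeholders):

import os

def archive_cache_path(cache_dir, repo_name, short_id, ext, subrepos=False):
    # e.g. 'my_repo-1a2b3c4d.zip' or 'grp_repo-sub-1a2b3c4d.tar.gz'
    name = '%s-%s%s%s' % (
        repo_name.replace('/', '_'), '-sub' if subrepos else '', short_id, ext)
    return os.path.join(cache_dir, name)

# is_cached = os.path.isfile(
#     archive_cache_path('/tmp/archive_cache', 'grp/repo', '1a2b3c4d', '.zip'))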
847 847 @LoginRequired()
848 848 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
849 849 'repository.admin')
850 850 def diff(self, repo_name, f_path):
851 851
852 852 c.action = request.GET.get('diff')
853 853 diff1 = request.GET.get('diff1', '')
854 854 diff2 = request.GET.get('diff2', '')
855 855
856 856 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
857 857
858 858 ignore_whitespace = str2bool(request.GET.get('ignorews'))
859 859 line_context = request.GET.get('context', 3)
860 860
861 861 if not any((diff1, diff2)):
862 862 h.flash(
863 863 'Need query parameter "diff1" or "diff2" to generate a diff.',
864 864 category='error')
865 865 raise HTTPBadRequest()
866 866
867 867 if c.action not in ['download', 'raw']:
868 868 # redirect to new view if we render diff
869 869 return redirect(
870 870 url('compare_url', repo_name=repo_name,
871 871 source_ref_type='rev',
872 872 source_ref=diff1,
873 873 target_repo=c.repo_name,
874 874 target_ref_type='rev',
875 875 target_ref=diff2,
876 876 f_path=f_path))
877 877
878 878 try:
879 879 node1 = self._get_file_node(diff1, path1)
880 880 node2 = self._get_file_node(diff2, f_path)
881 881 except (RepositoryError, NodeError):
882 882 log.exception("Exception while trying to get node from repository")
883 883 return redirect(url(
884 884 'files_home', repo_name=c.repo_name, f_path=f_path))
885 885
886 886 if all(isinstance(node.commit, EmptyCommit)
887 887 for node in (node1, node2)):
888 888 raise HTTPNotFound
889 889
890 890 c.commit_1 = node1.commit
891 891 c.commit_2 = node2.commit
892 892
893 893 if c.action == 'download':
894 894 _diff = diffs.get_gitdiff(node1, node2,
895 895 ignore_whitespace=ignore_whitespace,
896 896 context=line_context)
897 897 diff = diffs.DiffProcessor(_diff, format='gitdiff')
898 898
899 899 diff_name = '%s_vs_%s.diff' % (diff1, diff2)
900 900 response.content_type = 'text/plain'
901 901 response.content_disposition = (
902 902 'attachment; filename=%s' % (diff_name,)
903 903 )
904 904 charset = self._get_default_encoding()
905 905 if charset:
906 906 response.charset = charset
907 907 return diff.as_raw()
908 908
909 909 elif c.action == 'raw':
910 910 _diff = diffs.get_gitdiff(node1, node2,
911 911 ignore_whitespace=ignore_whitespace,
912 912 context=line_context)
913 913 diff = diffs.DiffProcessor(_diff, format='gitdiff')
914 914 response.content_type = 'text/plain'
915 915 charset = self._get_default_encoding()
916 916 if charset:
917 917 response.charset = charset
918 918 return diff.as_raw()
919 919
920 920 else:
921 921 return redirect(
922 922 url('compare_url', repo_name=repo_name,
923 923 source_ref_type='rev',
924 924 source_ref=diff1,
925 925 target_repo=c.repo_name,
926 926 target_ref_type='rev',
927 927 target_ref=diff2,
928 928 f_path=f_path))
929 929
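Only the 'download' and 'raw' values of the `diff` parameter are still handled by this action; anything else is redirected to the compare view. Roughly, the request shapes look as follows (the URL path is a placeholder, revisions are made up):

# GET /<repo>/diff/<f_path>?diff1=<rev1>&diff2=<rev2>&diff=download
#     -> 'text/plain' attachment named '<rev1>_vs_<rev2>.diff'
# GET /<repo>/diff/<f_path>?diff1=<rev1>&diff2=<rev2>&diff=raw
#     -> inline 'text/plain' body with the git-style diff
# any other (or missing) 'diff' value
#     -> redirect to the compare view for <rev1>..<rev2>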
930 930 @LoginRequired()
931 931 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
932 932 'repository.admin')
933 933 def diff_2way(self, repo_name, f_path):
934 934 """
935 935 Kept only to make OLD links work
936 936 """
937 937 diff1 = request.GET.get('diff1', '')
938 938 diff2 = request.GET.get('diff2', '')
939 939
940 940 if not any((diff1, diff2)):
941 941 h.flash(
942 942 'Need query parameter "diff1" or "diff2" to generate a diff.',
943 943 category='error')
944 944 raise HTTPBadRequest()
945 945
946 946 return redirect(
947 947 url('compare_url', repo_name=repo_name,
948 948 source_ref_type='rev',
949 949 source_ref=diff1,
950 950 target_repo=c.repo_name,
951 951 target_ref_type='rev',
952 952 target_ref=diff2,
953 953 f_path=f_path,
954 954 diffmode='sideside'))
955 955
956 956 def _get_file_node(self, commit_id, f_path):
957 957 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
958 958 commit = c.rhodecode_repo.get_commit(commit_id=commit_id)
959 959 try:
960 960 node = commit.get_node(f_path)
961 961 if node.is_dir():
962 962                     raise NodeError('%s path is a %s, not a file'
963 963                                     % (node, type(node)))
964 964 except NodeDoesNotExistError:
965 965 commit = EmptyCommit(
966 966 commit_id=commit_id,
967 967 idx=commit.idx,
968 968 repo=commit.repository,
969 969 alias=commit.repository.alias,
970 970 message=commit.message,
971 971 author=commit.author,
972 972 date=commit.date)
973 973 node = FileNode(f_path, '', commit=commit)
974 974 else:
975 975 commit = EmptyCommit(
976 976 repo=c.rhodecode_repo,
977 977 alias=c.rhodecode_repo.alias)
978 978 node = FileNode(f_path, '', commit=commit)
979 979 return node
980 980
981 981 def _get_node_history(self, commit, f_path, commits=None):
982 982 """
983 983 get commit history for given node
984 984
985 985 :param commit: commit to calculate history
986 986 :param f_path: path for node to calculate history for
987 987 :param commits: if passed don't calculate history and take
988 988 commits defined in this list
989 989 """
990 990 # calculate history based on tip
991 991 tip = c.rhodecode_repo.get_commit()
992 992 if commits is None:
993 993 pre_load = ["author", "branch"]
994 994 try:
995 995 commits = tip.get_file_history(f_path, pre_load=pre_load)
996 996 except (NodeDoesNotExistError, CommitError):
997 997 # this node is not present at tip!
998 998 commits = commit.get_file_history(f_path, pre_load=pre_load)
999 999
1000 1000 history = []
1001 1001 commits_group = ([], _("Changesets"))
1002 1002 for commit in commits:
1003 1003 branch = ' (%s)' % commit.branch if commit.branch else ''
1004 1004 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
1005 1005 commits_group[0].append((commit.raw_id, n_desc,))
1006 1006 history.append(commits_group)
1007 1007
1008 1008 symbolic_reference = self._symbolic_reference
1009 1009
1010 1010 if c.rhodecode_repo.alias == 'svn':
1011 1011 adjusted_f_path = self._adjust_file_path_for_svn(
1012 1012 f_path, c.rhodecode_repo)
1013 1013 if adjusted_f_path != f_path:
1014 1014 log.debug(
1015 1015 'Recognized svn tag or branch in file "%s", using svn '
1016 1016 'specific symbolic references', f_path)
1017 1017 f_path = adjusted_f_path
1018 1018 symbolic_reference = self._symbolic_reference_svn
1019 1019
1020 1020 branches = self._create_references(
1021 1021 c.rhodecode_repo.branches, symbolic_reference, f_path)
1022 1022 branches_group = (branches, _("Branches"))
1023 1023
1024 1024 tags = self._create_references(
1025 1025 c.rhodecode_repo.tags, symbolic_reference, f_path)
1026 1026 tags_group = (tags, _("Tags"))
1027 1027
1028 1028 history.append(branches_group)
1029 1029 history.append(tags_group)
1030 1030
1031 1031 return history, commits
1032 1032
1033 1033 def _adjust_file_path_for_svn(self, f_path, repo):
1034 1034 """
1035 1035 Computes the relative path of `f_path`.
1036 1036
1037 1037 This is mainly based on prefix matching of the recognized tags and
1038 1038 branches in the underlying repository.
1039 1039 """
1040 1040 tags_and_branches = itertools.chain(
1041 1041 repo.branches.iterkeys(),
1042 1042 repo.tags.iterkeys())
1043 1043 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
1044 1044
1045 1045 for name in tags_and_branches:
1046 1046 if f_path.startswith(name + '/'):
1047 1047 f_path = vcspath.relpath(f_path, name)
1048 1048 break
1049 1049 return f_path
1050 1050
1051 1051 def _create_references(
1052 1052 self, branches_or_tags, symbolic_reference, f_path):
1053 1053 items = []
1054 1054 for name, commit_id in branches_or_tags.items():
1055 1055 sym_ref = symbolic_reference(commit_id, name, f_path)
1056 1056 items.append((sym_ref, name))
1057 1057 return items
1058 1058
1059 1059 def _symbolic_reference(self, commit_id, name, f_path):
1060 1060 return commit_id
1061 1061
1062 1062 def _symbolic_reference_svn(self, commit_id, name, f_path):
1063 1063 new_f_path = vcspath.join(name, f_path)
1064 1064 return u'%s@%s' % (new_f_path, commit_id)
1065 1065
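For Subversion, file history references keep the tag/branch prefix in the path and append the revision after an '@'. A worked example of how the two helpers above combine, with made-up names:

# _adjust_file_path_for_svn('branches/stable/setup.py', repo)
#     -> 'setup.py'        ('branches/stable' is a recognized branch prefix)
# _symbolic_reference_svn('1234', 'branches/stable', 'setup.py')
#     -> u'branches/stable/setup.py@1234'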
1066 1066 @LoginRequired()
1067 1067 @XHRRequired()
1068 1068 @HasRepoPermissionAnyDecorator(
1069 1069 'repository.read', 'repository.write', 'repository.admin')
1070 1070 @jsonify
1071 1071 def nodelist(self, repo_name, revision, f_path):
1072 1072 commit = self.__get_commit_or_redirect(revision, repo_name)
1073 1073
1074 1074 metadata = self._get_nodelist_at_commit(
1075 1075 repo_name, commit.raw_id, f_path)
1076 1076 return {'nodes': metadata}
1077 1077
1078 1078 @LoginRequired()
1079 1079 @XHRRequired()
1080 1080 @HasRepoPermissionAnyDecorator(
1081 1081 'repository.read', 'repository.write', 'repository.admin')
1082 1082 def nodetree_full(self, repo_name, commit_id, f_path):
1083 1083 """
1084 1084 Returns rendered html of file tree that contains commit date,
1085 1085 author, revision for the specified combination of
1086 1086 repo, commit_id and file path
1087 1087
1088 1088 :param repo_name: name of the repository
1089 1089 :param commit_id: commit_id of file tree
1090 1090 :param f_path: file path of the requested directory
1091 1091 """
1092 1092
1093 1093 commit = self.__get_commit_or_redirect(commit_id, repo_name)
1094 1094 try:
1095 1095 dir_node = commit.get_node(f_path)
1096 1096 except RepositoryError as e:
1097 1097 return 'error {}'.format(safe_str(e))
1098 1098
1099 1099 if dir_node.is_file():
1100 1100 return ''
1101 1101
1102 1102 c.file = dir_node
1103 1103 c.commit = commit
1104 1104
1105 1105         # using force=True here is a small trick: we flush the cache and
1106 1106         # recompute it under the same key as without full_load, so the fully
1107 1107         # loaded tree replaces the partial one in the cache
1108 1108 return self._get_tree_at_commit(
1109 1109 repo_name, commit.raw_id, dir_node.path, full_load=True,
1110 1110 force=True)
@@ -1,1008 +1,1009 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 pull requests controller for rhodecode for initializing pull requests
23 23 """
24 24 import types
25 25
26 26 import peppercorn
27 27 import formencode
28 28 import logging
29 29 import collections
30 30
31 31 from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest
32 32 from pylons import request, tmpl_context as c, url
33 33 from pylons.controllers.util import redirect
34 34 from pylons.i18n.translation import _
35 35 from pyramid.threadlocal import get_current_registry
36 36 from sqlalchemy.sql import func
37 37 from sqlalchemy.sql.expression import or_
38 38
39 39 from rhodecode import events
40 40 from rhodecode.lib import auth, diffs, helpers as h, codeblocks
41 41 from rhodecode.lib.ext_json import json
42 42 from rhodecode.lib.base import (
43 43 BaseRepoController, render, vcs_operation_context)
44 44 from rhodecode.lib.auth import (
45 45 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
46 46 HasAcceptedRepoType, XHRRequired)
47 47 from rhodecode.lib.channelstream import channelstream_request
48 48 from rhodecode.lib.utils import jsonify
49 49 from rhodecode.lib.utils2 import (
50 50 safe_int, safe_str, str2bool, safe_unicode)
51 51 from rhodecode.lib.vcs.backends.base import (
52 52 EmptyCommit, UpdateFailureReason, EmptyRepository)
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError,
55 55 NodeDoesNotExistError)
56 56
57 57 from rhodecode.model.changeset_status import ChangesetStatusModel
58 58 from rhodecode.model.comment import CommentsModel
59 59 from rhodecode.model.db import (PullRequest, ChangesetStatus, ChangesetComment,
60 60 Repository, PullRequestVersion)
61 61 from rhodecode.model.forms import PullRequestForm
62 62 from rhodecode.model.meta import Session
63 63 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
64 64
65 65 log = logging.getLogger(__name__)
66 66
67 67
68 68 class PullrequestsController(BaseRepoController):
69 69
70 70 def __before__(self):
71 71 super(PullrequestsController, self).__before__()
72 72 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
73 73 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
74 74
75 75 @LoginRequired()
76 76 @NotAnonymous()
77 77 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
78 78 'repository.admin')
79 79 @HasAcceptedRepoType('git', 'hg')
80 80 def index(self):
81 81 source_repo = c.rhodecode_db_repo
82 82
83 83 try:
84 84 source_repo.scm_instance().get_commit()
85 85 except EmptyRepositoryError:
86 86 h.flash(h.literal(_('There are no commits yet')),
87 87 category='warning')
88 88 redirect(h.route_path('repo_summary', repo_name=source_repo.repo_name))
89 89
90 90 commit_id = request.GET.get('commit')
91 91 branch_ref = request.GET.get('branch')
92 92 bookmark_ref = request.GET.get('bookmark')
93 93
94 94 try:
95 95 source_repo_data = PullRequestModel().generate_repo_data(
96 96 source_repo, commit_id=commit_id,
97 97 branch=branch_ref, bookmark=bookmark_ref)
98 98 except CommitDoesNotExistError as e:
99 99 log.exception(e)
100 100 h.flash(_('Commit does not exist'), 'error')
101 101 redirect(url('pullrequest_home', repo_name=source_repo.repo_name))
102 102
103 103 default_target_repo = source_repo
104 104
105 105 if source_repo.parent:
106 106 parent_vcs_obj = source_repo.parent.scm_instance()
107 107 if parent_vcs_obj and not parent_vcs_obj.is_empty():
108 108 # change default if we have a parent repo
109 109 default_target_repo = source_repo.parent
110 110
111 111 target_repo_data = PullRequestModel().generate_repo_data(
112 112 default_target_repo)
113 113
114 114 selected_source_ref = source_repo_data['refs']['selected_ref']
115 115
116 116 title_source_ref = selected_source_ref.split(':', 2)[1]
117 117 c.default_title = PullRequestModel().generate_pullrequest_title(
118 118 source=source_repo.repo_name,
119 119 source_ref=title_source_ref,
120 120 target=default_target_repo.repo_name
121 121 )
122 122
123 123 c.default_repo_data = {
124 124 'source_repo_name': source_repo.repo_name,
125 125 'source_refs_json': json.dumps(source_repo_data),
126 126 'target_repo_name': default_target_repo.repo_name,
127 127 'target_refs_json': json.dumps(target_repo_data),
128 128 }
129 129 c.default_source_ref = selected_source_ref
130 130
131 131 return render('/pullrequests/pullrequest.mako')
132 132
133 133 @LoginRequired()
134 134 @NotAnonymous()
135 135 @XHRRequired()
136 136 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
137 137 'repository.admin')
138 138 @jsonify
139 139 def get_repo_refs(self, repo_name, target_repo_name):
140 140 repo = Repository.get_by_repo_name(target_repo_name)
141 141 if not repo:
142 142 raise HTTPNotFound
143 143 return PullRequestModel().generate_repo_data(repo)
144 144
145 145 @LoginRequired()
146 146 @NotAnonymous()
147 147 @XHRRequired()
148 148 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
149 149 'repository.admin')
150 150 @jsonify
151 151 def get_repo_destinations(self, repo_name):
152 152 repo = Repository.get_by_repo_name(repo_name)
153 153 if not repo:
154 154 raise HTTPNotFound
155 155 filter_query = request.GET.get('query')
156 156
157 157 query = Repository.query() \
158 158 .order_by(func.length(Repository.repo_name)) \
159 159 .filter(or_(
160 160 Repository.repo_name == repo.repo_name,
161 161 Repository.fork_id == repo.repo_id))
162 162
163 163 if filter_query:
164 164 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
165 165 query = query.filter(
166 166 Repository.repo_name.ilike(ilike_expression))
167 167
168 168 add_parent = False
169 169 if repo.parent:
170 170 if filter_query in repo.parent.repo_name:
171 171 parent_vcs_obj = repo.parent.scm_instance()
172 172 if parent_vcs_obj and not parent_vcs_obj.is_empty():
173 173 add_parent = True
174 174
175 175 limit = 20 - 1 if add_parent else 20
176 176 all_repos = query.limit(limit).all()
177 177 if add_parent:
178 178 all_repos += [repo.parent]
179 179
180 180 repos = []
181 181 for obj in self.scm_model.get_repos(all_repos):
182 182 repos.append({
183 183 'id': obj['name'],
184 184 'text': obj['name'],
185 185 'type': 'repo',
186 186 'obj': obj['dbrepo']
187 187 })
188 188
189 189 data = {
190 190 'more': False,
191 191 'results': [{
192 192 'text': _('Repositories'),
193 193 'children': repos
194 194 }] if repos else []
195 195 }
196 196 return data
197 197
198 198 @LoginRequired()
199 199 @NotAnonymous()
200 200 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
201 201 'repository.admin')
202 202 @HasAcceptedRepoType('git', 'hg')
203 203 @auth.CSRFRequired()
204 204 def create(self, repo_name):
205 205 repo = Repository.get_by_repo_name(repo_name)
206 206 if not repo:
207 207 raise HTTPNotFound
208 208
209 209 controls = peppercorn.parse(request.POST.items())
210 210
211 211 try:
212 212 _form = PullRequestForm(repo.repo_id)().to_python(controls)
213 213 except formencode.Invalid as errors:
214 214 if errors.error_dict.get('revisions'):
215 215 msg = 'Revisions: %s' % errors.error_dict['revisions']
216 216 elif errors.error_dict.get('pullrequest_title'):
217 217 msg = _('Pull request requires a title with min. 3 chars')
218 218 else:
219 219 msg = _('Error creating pull request: {}').format(errors)
220 220 log.exception(msg)
221 221 h.flash(msg, 'error')
222 222
223 223 # would rather just go back to form ...
224 224 return redirect(url('pullrequest_home', repo_name=repo_name))
225 225
226 226 source_repo = _form['source_repo']
227 227 source_ref = _form['source_ref']
228 228 target_repo = _form['target_repo']
229 229 target_ref = _form['target_ref']
230 230 commit_ids = _form['revisions'][::-1]
231 231
232 232 # find the ancestor for this pr
233 233 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
234 234 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
235 235
236 236 source_scm = source_db_repo.scm_instance()
237 237 target_scm = target_db_repo.scm_instance()
238 238
239 239 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
240 240 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
241 241
242 242 ancestor = source_scm.get_common_ancestor(
243 243 source_commit.raw_id, target_commit.raw_id, target_scm)
244 244
245 245 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
246 246 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
247 247
248 248 pullrequest_title = _form['pullrequest_title']
249 249 title_source_ref = source_ref.split(':', 2)[1]
250 250 if not pullrequest_title:
251 251 pullrequest_title = PullRequestModel().generate_pullrequest_title(
252 252 source=source_repo,
253 253 source_ref=title_source_ref,
254 254 target=target_repo
255 255 )
256 256
257 257 description = _form['pullrequest_desc']
258 258
259 259 get_default_reviewers_data, validate_default_reviewers = \
260 260 PullRequestModel().get_reviewer_functions()
261 261
262 262         # recalculate the reviewer rules, to make sure we can validate the
263 263         # reviewers given in the form
263 263 reviewer_rules = get_default_reviewers_data(
264 264 c.rhodecode_user.get_instance(), source_db_repo,
265 265 source_commit, target_db_repo, target_commit)
266 266
267 267 given_reviewers = _form['review_members']
268 268 reviewers = validate_default_reviewers(given_reviewers, reviewer_rules)
269 269
270 270 try:
271 271 pull_request = PullRequestModel().create(
272 272 c.rhodecode_user.user_id, source_repo, source_ref, target_repo,
273 273 target_ref, commit_ids, reviewers, pullrequest_title,
274 274 description, reviewer_rules
275 275 )
276 276 Session().commit()
277 277 h.flash(_('Successfully opened new pull request'),
278 278 category='success')
279 279 except Exception as e:
280 280 msg = _('Error occurred during creation of this pull request.')
281 281 log.exception(msg)
282 282 h.flash(msg, category='error')
283 283 return redirect(url('pullrequest_home', repo_name=repo_name))
284 284
285 285 return redirect(url('pullrequest_show', repo_name=target_repo,
286 286 pull_request_id=pull_request.pull_request_id))
287 287
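The target reference stored on a new pull request embeds the common ancestor as its third component, replacing the tip that came in from the form. A small sketch of that transformation with placeholder values:

# form value, as 'type:name:commit_id'
target_ref_in = 'branch:default:<tip_commit_id>'
ref_type, ref_name, _tip = target_ref_in.split(':')
# '<ancestor_id>' stands for the id returned by get_common_ancestor() above
target_ref_out = ':'.join((ref_type, ref_name, '<ancestor_id>'))
# -> 'branch:default:<ancestor_id>'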
288 288 @LoginRequired()
289 289 @NotAnonymous()
290 290 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
291 291 'repository.admin')
292 292 @auth.CSRFRequired()
293 293 @jsonify
294 294 def update(self, repo_name, pull_request_id):
295 295 pull_request_id = safe_int(pull_request_id)
296 296 pull_request = PullRequest.get_or_404(pull_request_id)
297 297 # only owner or admin can update it
298 298 allowed_to_update = PullRequestModel().check_user_update(
299 299 pull_request, c.rhodecode_user)
300 300 if allowed_to_update:
301 301 controls = peppercorn.parse(request.POST.items())
302 302
303 303 if 'review_members' in controls:
304 304 self._update_reviewers(
305 305 pull_request_id, controls['review_members'],
306 306 pull_request.reviewer_data)
307 307 elif str2bool(request.POST.get('update_commits', 'false')):
308 308 self._update_commits(pull_request)
309 309 elif str2bool(request.POST.get('edit_pull_request', 'false')):
310 310 self._edit_pull_request(pull_request)
311 311 else:
312 312 raise HTTPBadRequest()
313 313 return True
314 314 raise HTTPForbidden()
315 315
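The update() endpoint above multiplexes three operations over one URL, selected purely by the POST payload; the field names come from the code, and the mapping is summarized here for reference:

# POST contains 'review_members'      -> reviewers are re-validated and saved
# POST 'update_commits' is true       -> source/target refs are refreshed
# POST 'edit_pull_request' is true    -> title and description are updated
# anything else                       -> HTTPBadRequest (400)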
316 316 def _edit_pull_request(self, pull_request):
317 317 try:
318 318 PullRequestModel().edit(
319 319 pull_request, request.POST.get('title'),
320 request.POST.get('description'))
320 request.POST.get('description'), c.rhodecode_user)
321 321 except ValueError:
322 322 msg = _(u'Cannot update closed pull requests.')
323 323 h.flash(msg, category='error')
324 324 return
325 325 else:
326 326 Session().commit()
327 327
328 328 msg = _(u'Pull request title & description updated.')
329 329 h.flash(msg, category='success')
330 330 return
331 331
332 332 def _update_commits(self, pull_request):
333 333 resp = PullRequestModel().update_commits(pull_request)
334 334
335 335 if resp.executed:
336 336
337 337 if resp.target_changed and resp.source_changed:
338 338 changed = 'target and source repositories'
339 339 elif resp.target_changed and not resp.source_changed:
340 340 changed = 'target repository'
341 341 elif not resp.target_changed and resp.source_changed:
342 342 changed = 'source repository'
343 343 else:
344 344 changed = 'nothing'
345 345
346 346 msg = _(
347 347 u'Pull request updated to "{source_commit_id}" with '
348 348 u'{count_added} added, {count_removed} removed commits. '
349 349 u'Source of changes: {change_source}')
350 350 msg = msg.format(
351 351 source_commit_id=pull_request.source_ref_parts.commit_id,
352 352 count_added=len(resp.changes.added),
353 353 count_removed=len(resp.changes.removed),
354 354 change_source=changed)
355 355 h.flash(msg, category='success')
356 356
357 357 registry = get_current_registry()
358 358 rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {})
359 359 channelstream_config = rhodecode_plugins.get('channelstream', {})
360 360 if channelstream_config.get('enabled'):
361 361 message = msg + (
362 362 ' - <a onclick="window.location.reload()">'
363 363 '<strong>{}</strong></a>'.format(_('Reload page')))
364 364 channel = '/repo${}$/pr/{}'.format(
365 365 pull_request.target_repo.repo_name,
366 366 pull_request.pull_request_id
367 367 )
368 368 payload = {
369 369 'type': 'message',
370 370 'user': 'system',
371 371 'exclude_users': [request.user.username],
372 372 'channel': channel,
373 373 'message': {
374 374 'message': message,
375 375 'level': 'success',
376 376 'topic': '/notifications'
377 377 }
378 378 }
379 379 channelstream_request(
380 380 channelstream_config, [payload], '/message',
381 381 raise_exc=False)
382 382 else:
383 383 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
384 384 warning_reasons = [
385 385 UpdateFailureReason.NO_CHANGE,
386 386 UpdateFailureReason.WRONG_REF_TYPE,
387 387 ]
388 388 category = 'warning' if resp.reason in warning_reasons else 'error'
389 389 h.flash(msg, category=category)
390 390
391 391 @auth.CSRFRequired()
392 392 @LoginRequired()
393 393 @NotAnonymous()
394 394 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
395 395 'repository.admin')
396 396 def merge(self, repo_name, pull_request_id):
397 397 """
398 398 POST /{repo_name}/pull-request/{pull_request_id}
399 399
400 400 Merge will perform a server-side merge of the specified
401 401 pull request, if the pull request is approved and mergeable.
402 402 After successful merging, the pull request is automatically
403 403 closed, with a relevant comment.
404 404 """
405 405 pull_request_id = safe_int(pull_request_id)
406 406 pull_request = PullRequest.get_or_404(pull_request_id)
407 407 user = c.rhodecode_user
408 408
409 409 check = MergeCheck.validate(pull_request, user)
410 410 merge_possible = not check.failed
411 411
412 412 for err_type, error_msg in check.errors:
413 413 h.flash(error_msg, category=err_type)
414 414
415 415 if merge_possible:
416 416 log.debug("Pre-conditions checked, trying to merge.")
417 417 extras = vcs_operation_context(
418 418 request.environ, repo_name=pull_request.target_repo.repo_name,
419 419 username=user.username, action='push',
420 420 scm=pull_request.target_repo.repo_type)
421 421 self._merge_pull_request(pull_request, user, extras)
422 422
423 423 return redirect(url(
424 424 'pullrequest_show',
425 425 repo_name=pull_request.target_repo.repo_name,
426 426 pull_request_id=pull_request.pull_request_id))
427 427
428 428 def _merge_pull_request(self, pull_request, user, extras):
429 429 merge_resp = PullRequestModel().merge(
430 430 pull_request, user, extras=extras)
431 431
432 432 if merge_resp.executed:
433 433 log.debug("The merge was successful, closing the pull request.")
434 434 PullRequestModel().close_pull_request(
435 435 pull_request.pull_request_id, user)
436 436 Session().commit()
437 437 msg = _('Pull request was successfully merged and closed.')
438 438 h.flash(msg, category='success')
439 439 else:
440 440 log.debug(
441 441 "The merge was not successful. Merge response: %s",
442 442 merge_resp)
443 443 msg = PullRequestModel().merge_status_message(
444 444 merge_resp.failure_reason)
445 445 h.flash(msg, category='error')
446 446
447 447 def _update_reviewers(self, pull_request_id, review_members, reviewer_rules):
448 448
449 449 get_default_reviewers_data, validate_default_reviewers = \
450 450 PullRequestModel().get_reviewer_functions()
451 451
452 452 try:
453 453 reviewers = validate_default_reviewers(review_members, reviewer_rules)
454 454 except ValueError as e:
455 455 log.error('Reviewers Validation: {}'.format(e))
456 456 h.flash(e, category='error')
457 457 return
458 458
459 PullRequestModel().update_reviewers(pull_request_id, reviewers)
459 PullRequestModel().update_reviewers(
460 pull_request_id, reviewers, c.rhodecode_user)
460 461 h.flash(_('Pull request reviewers updated.'), category='success')
461 462 Session().commit()
462 463
463 464 @LoginRequired()
464 465 @NotAnonymous()
465 466 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
466 467 'repository.admin')
467 468 @auth.CSRFRequired()
468 469 @jsonify
469 470 def delete(self, repo_name, pull_request_id):
470 471 pull_request_id = safe_int(pull_request_id)
471 472 pull_request = PullRequest.get_or_404(pull_request_id)
472 473
473 474 pr_closed = pull_request.is_closed()
474 475 allowed_to_delete = PullRequestModel().check_user_delete(
475 476 pull_request, c.rhodecode_user) and not pr_closed
476 477
477 478 # only owner can delete it !
478 479 if allowed_to_delete:
479 PullRequestModel().delete(pull_request)
480 PullRequestModel().delete(pull_request, c.rhodecode_user)
480 481 Session().commit()
481 482 h.flash(_('Successfully deleted pull request'),
482 483 category='success')
483 484 return redirect(url('my_account_pullrequests'))
484 485
485 486         h.flash(_('You are not allowed to delete this pull request'),
486 487 category='error')
487 488 raise HTTPForbidden()
488 489
489 490 def _get_pr_version(self, pull_request_id, version=None):
490 491 pull_request_id = safe_int(pull_request_id)
491 492 at_version = None
492 493
493 494 if version and version == 'latest':
494 495 pull_request_ver = PullRequest.get(pull_request_id)
495 496 pull_request_obj = pull_request_ver
496 497 _org_pull_request_obj = pull_request_obj
497 498 at_version = 'latest'
498 499 elif version:
499 500 pull_request_ver = PullRequestVersion.get_or_404(version)
500 501 pull_request_obj = pull_request_ver
501 502 _org_pull_request_obj = pull_request_ver.pull_request
502 503 at_version = pull_request_ver.pull_request_version_id
503 504 else:
504 505 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
505 506 pull_request_id)
506 507
507 508 pull_request_display_obj = PullRequest.get_pr_display_object(
508 509 pull_request_obj, _org_pull_request_obj)
509 510
510 511 return _org_pull_request_obj, pull_request_obj, \
511 512 pull_request_display_obj, at_version
512 513
513 514 def _get_diffset(
514 515 self, source_repo, source_ref_id, target_ref_id, target_commit,
515 516 source_commit, diff_limit, file_limit, display_inline_comments):
516 517 vcs_diff = PullRequestModel().get_diff(
517 518 source_repo, source_ref_id, target_ref_id)
518 519
519 520 diff_processor = diffs.DiffProcessor(
520 521 vcs_diff, format='newdiff', diff_limit=diff_limit,
521 522 file_limit=file_limit, show_full_diff=c.fulldiff)
522 523
523 524 _parsed = diff_processor.prepare()
524 525
525 526 def _node_getter(commit):
526 527 def get_node(fname):
527 528 try:
528 529 return commit.get_node(fname)
529 530 except NodeDoesNotExistError:
530 531 return None
531 532
532 533 return get_node
533 534
534 535 diffset = codeblocks.DiffSet(
535 536 repo_name=c.repo_name,
536 537 source_repo_name=c.source_repo.repo_name,
537 538 source_node_getter=_node_getter(target_commit),
538 539 target_node_getter=_node_getter(source_commit),
539 540 comments=display_inline_comments
540 541 )
541 542 diffset = diffset.render_patchset(
542 543 _parsed, target_commit.raw_id, source_commit.raw_id)
543 544
544 545 return diffset
545 546
546 547 @LoginRequired()
547 548 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
548 549 'repository.admin')
549 550 def show(self, repo_name, pull_request_id):
550 551 pull_request_id = safe_int(pull_request_id)
551 552 version = request.GET.get('version')
552 553 from_version = request.GET.get('from_version') or version
553 554 merge_checks = request.GET.get('merge_checks')
554 555 c.fulldiff = str2bool(request.GET.get('fulldiff'))
555 556
556 557 (pull_request_latest,
557 558 pull_request_at_ver,
558 559 pull_request_display_obj,
559 560 at_version) = self._get_pr_version(
560 561 pull_request_id, version=version)
561 562 pr_closed = pull_request_latest.is_closed()
562 563
563 564 if pr_closed and (version or from_version):
564 565             # do not allow browsing versions of a closed pull request
565 566 return redirect(h.url('pullrequest_show', repo_name=repo_name,
566 567 pull_request_id=pull_request_id))
567 568
568 569 versions = pull_request_display_obj.versions()
569 570
570 571 c.at_version = at_version
571 572 c.at_version_num = (at_version
572 573 if at_version and at_version != 'latest'
573 574 else None)
574 575 c.at_version_pos = ChangesetComment.get_index_from_version(
575 576 c.at_version_num, versions)
576 577
577 578 (prev_pull_request_latest,
578 579 prev_pull_request_at_ver,
579 580 prev_pull_request_display_obj,
580 581 prev_at_version) = self._get_pr_version(
581 582 pull_request_id, version=from_version)
582 583
583 584 c.from_version = prev_at_version
584 585 c.from_version_num = (prev_at_version
585 586 if prev_at_version and prev_at_version != 'latest'
586 587 else None)
587 588 c.from_version_pos = ChangesetComment.get_index_from_version(
588 589 c.from_version_num, versions)
589 590
590 591 # define if we're in COMPARE mode or VIEW at version mode
591 592 compare = at_version != prev_at_version
592 593
593 594         # the repo_name this pull request was opened against,
594 595         # i.e. target_repo must match
595 596 if repo_name != pull_request_at_ver.target_repo.repo_name:
596 597 raise HTTPNotFound
597 598
598 599 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
599 600 pull_request_at_ver)
600 601
601 602 c.pull_request = pull_request_display_obj
602 603 c.pull_request_latest = pull_request_latest
603 604
604 605 if compare or (at_version and not at_version == 'latest'):
605 606 c.allowed_to_change_status = False
606 607 c.allowed_to_update = False
607 608 c.allowed_to_merge = False
608 609 c.allowed_to_delete = False
609 610 c.allowed_to_comment = False
610 611 c.allowed_to_close = False
611 612 else:
612 613 can_change_status = PullRequestModel().check_user_change_status(
613 614 pull_request_at_ver, c.rhodecode_user)
614 615 c.allowed_to_change_status = can_change_status and not pr_closed
615 616
616 617 c.allowed_to_update = PullRequestModel().check_user_update(
617 618 pull_request_latest, c.rhodecode_user) and not pr_closed
618 619 c.allowed_to_merge = PullRequestModel().check_user_merge(
619 620 pull_request_latest, c.rhodecode_user) and not pr_closed
620 621 c.allowed_to_delete = PullRequestModel().check_user_delete(
621 622 pull_request_latest, c.rhodecode_user) and not pr_closed
622 623 c.allowed_to_comment = not pr_closed
623 624 c.allowed_to_close = c.allowed_to_merge and not pr_closed
624 625
625 626 c.forbid_adding_reviewers = False
626 627 c.forbid_author_to_review = False
627 628 c.forbid_commit_author_to_review = False
628 629
629 630 if pull_request_latest.reviewer_data and \
630 631 'rules' in pull_request_latest.reviewer_data:
631 632 rules = pull_request_latest.reviewer_data['rules'] or {}
632 633 try:
633 634 c.forbid_adding_reviewers = rules.get(
634 635 'forbid_adding_reviewers')
635 636 c.forbid_author_to_review = rules.get(
636 637 'forbid_author_to_review')
637 638 c.forbid_commit_author_to_review = rules.get(
638 639 'forbid_commit_author_to_review')
639 640 except Exception:
640 641 pass
641 642
642 643 # check merge capabilities
643 644 _merge_check = MergeCheck.validate(
644 645 pull_request_latest, user=c.rhodecode_user)
645 646 c.pr_merge_errors = _merge_check.error_details
646 647 c.pr_merge_possible = not _merge_check.failed
647 648 c.pr_merge_message = _merge_check.merge_msg
648 649
649 650 c.pull_request_review_status = _merge_check.review_status
650 651 if merge_checks:
651 652 return render('/pullrequests/pullrequest_merge_checks.mako')
652 653
653 654 comments_model = CommentsModel()
654 655
655 656 # reviewers and statuses
656 657 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
657 658 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
658 659
659 660 # GENERAL COMMENTS with versions #
660 661 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
661 662 q = q.order_by(ChangesetComment.comment_id.asc())
662 663 general_comments = q
663 664
664 665 # pick comments we want to render at current version
665 666 c.comment_versions = comments_model.aggregate_comments(
666 667 general_comments, versions, c.at_version_num)
667 668 c.comments = c.comment_versions[c.at_version_num]['until']
668 669
669 670 # INLINE COMMENTS with versions #
670 671 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
671 672 q = q.order_by(ChangesetComment.comment_id.asc())
672 673 inline_comments = q
673 674
674 675 c.inline_versions = comments_model.aggregate_comments(
675 676 inline_comments, versions, c.at_version_num, inline=True)
676 677
677 678 # inject latest version
678 679 latest_ver = PullRequest.get_pr_display_object(
679 680 pull_request_latest, pull_request_latest)
680 681
681 682 c.versions = versions + [latest_ver]
682 683
683 684 # if we use version, then do not show later comments
684 685 # than current version
685 686 display_inline_comments = collections.defaultdict(
686 687 lambda: collections.defaultdict(list))
687 688 for co in inline_comments:
688 689 if c.at_version_num:
689 690 # pick only comments up to (and including) the given version, so we
690 691 # don't render comments from higher versions
691 692 should_render = co.pull_request_version_id and \
692 693 co.pull_request_version_id <= c.at_version_num
693 694 else:
694 695 # showing all, for 'latest'
695 696 should_render = True
696 697
697 698 if should_render:
698 699 display_inline_comments[co.f_path][co.line_no].append(co)
699 700
700 701 # load diff data into template context, if we use compare mode then
701 702 # diff is calculated based on changes between versions of PR
702 703
703 704 source_repo = pull_request_at_ver.source_repo
704 705 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
705 706
706 707 target_repo = pull_request_at_ver.target_repo
707 708 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
708 709
709 710 if compare:
710 711 # in compare mode, switch the diff base to the latest commit from the prev version
711 712 target_ref_id = prev_pull_request_display_obj.revisions[0]
712 713
713 714 # even when the PR was opened against a bookmark/branch/tag, we always
714 715 # convert it to a rev to guard against the bookmark or branch moving later
715 716 c.source_ref_type = 'rev'
716 717 c.source_ref = source_ref_id
717 718
718 719 c.target_ref_type = 'rev'
719 720 c.target_ref = target_ref_id
720 721
721 722 c.source_repo = source_repo
722 723 c.target_repo = target_repo
723 724
724 725 # diff_limit is the old behavior: when the limit is applied it cuts off
725 726 # the whole diff, otherwise it only hides the
726 727 # big files from the front-end
727 728 diff_limit = self.cut_off_limit_diff
728 729 file_limit = self.cut_off_limit_file
729 730
730 731 c.commit_ranges = []
731 732 source_commit = EmptyCommit()
732 733 target_commit = EmptyCommit()
733 734 c.missing_requirements = False
734 735
735 736 source_scm = source_repo.scm_instance()
736 737 target_scm = target_repo.scm_instance()
737 738
738 739 # try the shadow repo first, fall back to the regular repo
739 740 try:
740 741 commits_source_repo = pull_request_latest.get_shadow_repo()
741 742 except Exception:
742 743 log.debug('Failed to get shadow repo', exc_info=True)
743 744 commits_source_repo = source_scm
744 745
745 746 c.commits_source_repo = commits_source_repo
746 747 commit_cache = {}
747 748 try:
748 749 pre_load = ["author", "branch", "date", "message"]
749 750 show_revs = pull_request_at_ver.revisions
750 751 for rev in show_revs:
751 752 comm = commits_source_repo.get_commit(
752 753 commit_id=rev, pre_load=pre_load)
753 754 c.commit_ranges.append(comm)
754 755 commit_cache[comm.raw_id] = comm
755 756
756 757 # Order here matters, we first need to get target, and then
757 758 # the source
758 759 target_commit = commits_source_repo.get_commit(
759 760 commit_id=safe_str(target_ref_id))
760 761
761 762 source_commit = commits_source_repo.get_commit(
762 763 commit_id=safe_str(source_ref_id))
763 764
764 765 except CommitDoesNotExistError:
765 766 log.warning(
766 767 'Failed to get commit from `{}` repo'.format(
767 768 commits_source_repo), exc_info=True)
768 769 except RepositoryRequirementError:
769 770 log.warning(
770 771 'Failed to get all required data from repo', exc_info=True)
771 772 c.missing_requirements = True
772 773
773 774 c.ancestor = None # set it to None, to hide it from PR view
774 775
775 776 try:
776 777 ancestor_id = source_scm.get_common_ancestor(
777 778 source_commit.raw_id, target_commit.raw_id, target_scm)
778 779 c.ancestor_commit = source_scm.get_commit(ancestor_id)
779 780 except Exception:
780 781 c.ancestor_commit = None
781 782
782 783 c.statuses = source_repo.statuses(
783 784 [x.raw_id for x in c.commit_ranges])
784 785
785 786 # auto collapse if we have more than limit
786 787 collapse_limit = diffs.DiffProcessor._collapse_commits_over
787 788 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
788 789 c.compare_mode = compare
789 790
790 791 c.missing_commits = False
791 792 if (c.missing_requirements or isinstance(source_commit, EmptyCommit)
792 793 or source_commit == target_commit):
793 794
794 795 c.missing_commits = True
795 796 else:
796 797
797 798 c.diffset = self._get_diffset(
798 799 commits_source_repo, source_ref_id, target_ref_id,
799 800 target_commit, source_commit,
800 801 diff_limit, file_limit, display_inline_comments)
801 802
802 803 c.limited_diff = c.diffset.limited_diff
803 804
804 805 # calculate removed files that are bound to comments
805 806 comment_deleted_files = [
806 807 fname for fname in display_inline_comments
807 808 if fname not in c.diffset.file_stats]
808 809
809 810 c.deleted_files_comments = collections.defaultdict(dict)
810 811 for fname, per_line_comments in display_inline_comments.items():
811 812 if fname in comment_deleted_files:
812 813 c.deleted_files_comments[fname]['stats'] = 0
813 814 c.deleted_files_comments[fname]['comments'] = list()
814 815 for lno, comments in per_line_comments.items():
815 816 c.deleted_files_comments[fname]['comments'].extend(
816 817 comments)
817 818
818 819 # this is a hack to properly display links: when creating a PR, the
819 820 # compare view and others use a different notation, and
820 821 # compare_commits.mako renders links based on the target_repo.
821 822 # We need to swap that here to generate them properly on the html side
822 823 c.target_repo = c.source_repo
823 824
824 825 c.commit_statuses = ChangesetStatus.STATUSES
825 826
826 827 c.show_version_changes = not pr_closed
827 828 if c.show_version_changes:
828 829 cur_obj = pull_request_at_ver
829 830 prev_obj = prev_pull_request_at_ver
830 831
831 832 old_commit_ids = prev_obj.revisions
832 833 new_commit_ids = cur_obj.revisions
833 834 commit_changes = PullRequestModel()._calculate_commit_id_changes(
834 835 old_commit_ids, new_commit_ids)
835 836 c.commit_changes_summary = commit_changes
836 837
837 838 # calculate the diff for commits between versions
838 839 c.commit_changes = []
839 840 mark = lambda cs, fw: list(
840 841 h.itertools.izip_longest([], cs, fillvalue=fw))
841 842 for c_type, raw_id in mark(commit_changes.added, 'a') \
842 843 + mark(commit_changes.removed, 'r') \
843 844 + mark(commit_changes.common, 'c'):
844 845
845 846 if raw_id in commit_cache:
846 847 commit = commit_cache[raw_id]
847 848 else:
848 849 try:
849 850 commit = commits_source_repo.get_commit(raw_id)
850 851 except CommitDoesNotExistError:
851 852 # in case extraction fails, still use a "dummy" commit
852 853 # for display in the commit diff
853 854 commit = h.AttributeDict(
854 855 {'raw_id': raw_id,
855 856 'message': 'EMPTY or MISSING COMMIT'})
856 857 c.commit_changes.append([c_type, commit])
857 858
858 859 # current user review statuses for each version
859 860 c.review_versions = {}
860 861 if c.rhodecode_user.user_id in allowed_reviewers:
861 862 for co in general_comments:
862 863 if co.author.user_id == c.rhodecode_user.user_id:
863 864 # a comment may carry a status change
864 865 status = co.status_change
865 866 if status:
866 867 _ver_pr = status[0].comment.pull_request_version_id
867 868 c.review_versions[_ver_pr] = status[0]
868 869
869 870 return render('/pullrequests/pullrequest_show.mako')
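
The `allowed_to_*` block above hinges on two derived values, `compare` and `at_version`. A minimal standalone restatement of that decision, reusing the variable names from the code (a sketch for clarity, not the actual controller API)::

    def pr_view_mode(at_version, prev_at_version):
        # compare: diffing one PR version against another (read-only)
        compare = at_version != prev_at_version
        if compare:
            return 'compare'
        # viewing an old snapshot of the PR: all actions are disabled
        if at_version and at_version != 'latest':
            return 'view-at-version'
        # normal interactive view: comment/merge/close flags are computed
        return 'latest'
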
870 871
871 872 @LoginRequired()
872 873 @NotAnonymous()
873 874 @HasRepoPermissionAnyDecorator(
874 875 'repository.read', 'repository.write', 'repository.admin')
875 876 @auth.CSRFRequired()
876 877 @jsonify
877 878 def comment(self, repo_name, pull_request_id):
878 879 pull_request_id = safe_int(pull_request_id)
879 880 pull_request = PullRequest.get_or_404(pull_request_id)
880 881 if pull_request.is_closed():
881 882 log.debug('comment: forbidden because pull request is closed')
882 883 raise HTTPForbidden()
883 884
884 885 status = request.POST.get('changeset_status', None)
885 886 text = request.POST.get('text')
886 887 comment_type = request.POST.get('comment_type')
887 888 resolves_comment_id = request.POST.get('resolves_comment_id', None)
888 889 close_pull_request = request.POST.get('close_pull_request')
889 890
890 891 # the logic here works as follows: if we submit a close-PR
891 892 # comment, use the `close_pull_request_with_comment` function,
892 893 # otherwise handle the regular comment logic
893 894 user = c.rhodecode_user
894 895 repo = c.rhodecode_db_repo
895 896
896 897 if close_pull_request:
897 898 # only owner or admin or person with write permissions
898 899 allowed_to_close = PullRequestModel().check_user_update(
899 900 pull_request, c.rhodecode_user)
900 901 if not allowed_to_close:
901 902 log.debug('comment: forbidden because not allowed to close '
902 903 'pull request %s', pull_request_id)
903 904 raise HTTPForbidden()
904 905 comment, status = PullRequestModel().close_pull_request_with_comment(
905 906 pull_request, user, repo, message=text)
906 907 Session().flush()
907 908 events.trigger(
908 909 events.PullRequestCommentEvent(pull_request, comment))
909 910
910 911 else:
911 912 # regular comment case, could be inline, or one with status.
912 913 # for that one we check also permissions
913 914
914 915 allowed_to_change_status = PullRequestModel().check_user_change_status(
915 916 pull_request, c.rhodecode_user)
916 917
917 918 if status and allowed_to_change_status:
918 919 message = (_('Status change %(transition_icon)s %(status)s')
919 920 % {'transition_icon': '>',
920 921 'status': ChangesetStatus.get_status_lbl(status)})
921 922 text = text or message
922 923
923 924 comment = CommentsModel().create(
924 925 text=text,
925 926 repo=c.rhodecode_db_repo.repo_id,
926 927 user=c.rhodecode_user.user_id,
927 928 pull_request=pull_request_id,
928 929 f_path=request.POST.get('f_path'),
929 930 line_no=request.POST.get('line'),
930 931 status_change=(ChangesetStatus.get_status_lbl(status)
931 932 if status and allowed_to_change_status else None),
932 933 status_change_type=(status
933 934 if status and allowed_to_change_status else None),
934 935 comment_type=comment_type,
935 936 resolves_comment_id=resolves_comment_id
936 937 )
937 938
938 939 if allowed_to_change_status:
939 940 # calculate old status before we change it
940 941 old_calculated_status = pull_request.calculated_review_status()
941 942
942 943 # get status if set !
943 944 if status:
944 945 ChangesetStatusModel().set_status(
945 946 c.rhodecode_db_repo.repo_id,
946 947 status,
947 948 c.rhodecode_user.user_id,
948 949 comment,
949 950 pull_request=pull_request_id
950 951 )
951 952
952 953 Session().flush()
953 954 events.trigger(
954 955 events.PullRequestCommentEvent(pull_request, comment))
955 956
956 957 # we now calculate the status of the pull request, and based on that
957 958 # calculation we set the commit statuses
958 959 calculated_status = pull_request.calculated_review_status()
959 960 if old_calculated_status != calculated_status:
960 961 PullRequestModel()._trigger_pull_request_hook(
961 962 pull_request, c.rhodecode_user, 'review_status_change')
962 963
963 964 Session().commit()
964 965
965 966 if not request.is_xhr:
966 967 return redirect(h.url('pullrequest_show', repo_name=repo_name,
967 968 pull_request_id=pull_request_id))
968 969
969 970 data = {
970 971 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
971 972 }
972 973 if comment:
973 974 c.co = comment
974 975 rendered_comment = render('changeset/changeset_comment_block.mako')
975 976 data.update(comment.get_dict())
976 977 data.update({'rendered_text': rendered_comment})
977 978
978 979 return data
979 980
980 981 @LoginRequired()
981 982 @NotAnonymous()
982 983 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
983 984 'repository.admin')
984 985 @auth.CSRFRequired()
985 986 @jsonify
986 987 def delete_comment(self, repo_name, comment_id):
987 988 return self._delete_comment(comment_id)
988 989
989 990 def _delete_comment(self, comment_id):
990 991 comment_id = safe_int(comment_id)
991 992 co = ChangesetComment.get_or_404(comment_id)
992 993 if co.pull_request.is_closed():
993 994 # don't allow deleting comments on closed pull request
994 995 raise HTTPForbidden()
995 996
996 997 is_owner = co.author.user_id == c.rhodecode_user.user_id
997 998 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
998 999 if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner:
999 1000 old_calculated_status = co.pull_request.calculated_review_status()
1000 CommentsModel().delete(comment=co)
1001 CommentsModel().delete(comment=co, user=c.rhodecode_user)
1001 1002 Session().commit()
1002 1003 calculated_status = co.pull_request.calculated_review_status()
1003 1004 if old_calculated_status != calculated_status:
1004 1005 PullRequestModel()._trigger_pull_request_hook(
1005 1006 co.pull_request, c.rhodecode_user, 'review_status_change')
1006 1007 return True
1007 1008 else:
1008 1009 raise HTTPForbidden()
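
The only functional change in `_delete_comment` above is that the acting user is now passed to `CommentsModel().delete`. The comments model itself is not part of this hunk, so the following is only a plausible sketch, assuming the model forwards that user to the new audit logger (see the `audit_logger` module below); the helper name and payload are hypothetical::

    from rhodecode.lib import audit_logger

    def audit_comment_delete(comment, user):
        # comment: ChangesetComment, user: the acting user (as passed above)
        audit_logger.store_web(
            action='repo.pull_request.comment.delete',
            action_data={'comment_id': comment.comment_id},  # assumed payload
            user=user,
            repo=comment.pull_request.target_repo)
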
@@ -1,240 +1,257 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2017-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import datetime
23 23
24 24 from rhodecode.model import meta
25 25 from rhodecode.model.db import User, UserLog, Repository
26 26
27 27
28 28 log = logging.getLogger(__name__)
29 29
30 30 # action as key, and expected action_data as value
31 ACTIONS = {
31 ACTIONS_V1 = {
32 32 'user.login.success': {'user_agent': ''},
33 33 'user.login.failure': {'user_agent': ''},
34 34 'user.logout': {'user_agent': ''},
35 35 'user.password.reset_request': {},
36 36 'user.push': {'user_agent': '', 'commit_ids': []},
37 37 'user.pull': {'user_agent': ''},
38 38
39 39 'user.create': {'data': {}},
40 40 'user.delete': {'old_data': {}},
41 41 'user.edit': {'old_data': {}},
42 42 'user.edit.permissions': {},
43 43 'user.edit.ip.add': {},
44 44 'user.edit.ip.delete': {},
45 45 'user.edit.token.add': {},
46 46 'user.edit.token.delete': {},
47 47 'user.edit.email.add': {},
48 48 'user.edit.email.delete': {},
49 49 'user.edit.password_reset.enabled': {},
50 50 'user.edit.password_reset.disabled': {},
51 51
52 52 'user_group.create': {'data': {}},
53 53 'user_group.delete': {'old_data': {}},
54 54 'user_group.edit': {'old_data': {}},
55 55 'user_group.edit.permissions': {},
56 56 'user_group.edit.member.add': {},
57 57 'user_group.edit.member.delete': {},
58 58
59 59 'repo.create': {'data': {}},
60 60 'repo.fork': {'data': {}},
61 61 'repo.edit': {'old_data': {}},
62 62 'repo.edit.permissions': {},
63 63 'repo.delete': {'old_data': {}},
64 64 'repo.commit.strip': {},
65 65 'repo.archive.download': {},
66 66
67 'repo.pull_request.create': '',
68 'repo.pull_request.edit': '',
69 'repo.pull_request.delete': '',
70 'repo.pull_request.close': '',
71 'repo.pull_request.merge': '',
72 'repo.pull_request.vote': '',
73 'repo.pull_request.comment.create': '',
74 'repo.pull_request.comment.delete': '',
75
76 'repo.pull_request.reviewer.add': '',
77 'repo.pull_request.reviewer.delete': '',
78
79 'repo.commit.comment.create': '',
80 'repo.commit.comment.delete': '',
81 'repo.commit.vote': '',
82
67 83 'repo_group.create': {'data': {}},
68 84 'repo_group.edit': {'old_data': {}},
69 85 'repo_group.edit.permissions': {},
70 86 'repo_group.delete': {'old_data': {}},
71 87 }
88 ACTIONS = ACTIONS_V1
72 89
73 90 SOURCE_WEB = 'source_web'
74 91 SOURCE_API = 'source_api'
75 92
76 93
77 94 class UserWrap(object):
78 95 """
79 96 Fake object used to imitate AuthUser
80 97 """
81 98
82 99 def __init__(self, user_id=None, username=None, ip_addr=None):
83 100 self.user_id = user_id
84 101 self.username = username
85 102 self.ip_addr = ip_addr
86 103
87 104
88 105 class RepoWrap(object):
89 106 """
90 107 Fake object used to imitate RepoObject that audit logger requires
91 108 """
92 109
93 110 def __init__(self, repo_id=None, repo_name=None):
94 111 self.repo_id = repo_id
95 112 self.repo_name = repo_name
96 113
97 114
98 115 def _store_log(action_name, action_data, user_id, username, user_data,
99 116 ip_address, repository_id, repository_name):
100 117 user_log = UserLog()
101 118 user_log.version = UserLog.VERSION_2
102 119
103 120 user_log.action = action_name
104 121 user_log.action_data = action_data
105 122
106 123 user_log.user_ip = ip_address
107 124
108 125 user_log.user_id = user_id
109 126 user_log.username = username
110 127 user_log.user_data = user_data
111 128
112 129 user_log.repository_id = repository_id
113 130 user_log.repository_name = repository_name
114 131
115 132 user_log.action_date = datetime.datetime.now()
116 133
117 134 log.info('AUDIT: Logging action: `%s` by user:id:%s[%s] ip:%s',
118 135 action_name, user_id, username, ip_address)
119 136
120 137 return user_log
121 138
122 139
123 140 def store_web(*args, **kwargs):
124 141 if 'action_data' not in kwargs:
125 142 kwargs['action_data'] = {}
126 143 kwargs['action_data'].update({
127 144 'source': SOURCE_WEB
128 145 })
129 146 return store(*args, **kwargs)
130 147
131 148
132 149 def store_api(*args, **kwargs):
133 150 if 'action_data' not in kwargs:
134 151 kwargs['action_data'] = {}
135 152 kwargs['action_data'].update({
136 153 'source': SOURCE_API
137 154 })
138 155 return store(*args, **kwargs)
139 156
140 157
141 158 def store(action, user, action_data=None, user_data=None, ip_addr=None,
142 159 repo=None, sa_session=None, commit=False):
143 160 """
144 161 Audit logger for various actions made by users; typically this
145 162 results in a call such as::
146 163
147 164 from rhodecode.lib import audit_logger
148 165
149 166 audit_logger.store(
150 167 action='repo.edit', user=self._rhodecode_user)
151 168 audit_logger.store(
152 169 action='repo.delete', action_data={'data': repo_data},
153 170 user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8'))
154 171
155 172 # repo action
156 173 audit_logger.store(
157 174 action='repo.delete',
158 175 user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8'),
159 176 repo=audit_logger.RepoWrap(repo_name='some-repo'))
160 177
161 178 # repo action, when we know and have the repository object already
162 179 audit_logger.store(
163 180 action='repo.delete',
164 181 action_data={'source': audit_logger.SOURCE_WEB, },
165 182 user=self._rhodecode_user,
166 183 repo=repo_object)
167 184
168 185 # alternative wrapper to the above
169 186 audit_logger.store_web(
170 187 action='repo.delete',
171 188 action_data={},
172 189 user=self._rhodecode_user,
173 190 repo=repo_object)
174 191
175 192 # without a user?
176 193 audit_logger.store(
177 194 action='user.login.failure',
178 195 user=audit_logger.UserWrap(
179 196 username=self.request.params.get('username'),
180 197 ip_addr=self.request.remote_addr))
181 198
182 199 """
183 200 from rhodecode.lib.utils2 import safe_unicode
184 201 from rhodecode.lib.auth import AuthUser
185 202
186 203 action_spec = ACTIONS.get(action, None)
187 204 if action_spec is None:
188 205 raise ValueError('Action `{}` is not supported'.format(action))
189 206
190 207 if not sa_session:
191 208 sa_session = meta.Session()
192 209
193 210 try:
194 211 username = getattr(user, 'username', None)
195 212 if not username:
196 213 pass
197 214
198 215 user_id = getattr(user, 'user_id', None)
199 216 if not user_id:
200 217 # maybe we have a username? Try to figure out user_id from the username
201 218 if username:
202 219 user_id = getattr(
203 220 User.get_by_username(username), 'user_id', None)
204 221
205 222 ip_addr = ip_addr or getattr(user, 'ip_addr', None)
206 223 if not ip_addr:
207 224 pass
208 225
209 226 if not user_data:
210 227 # try to get this from the auth user
211 228 if isinstance(user, AuthUser):
212 229 user_data = {
213 230 'username': user.username,
214 231 'email': user.email,
215 232 }
216 233
217 234 repository_name = getattr(repo, 'repo_name', None)
218 235 repository_id = getattr(repo, 'repo_id', None)
219 236 if not repository_id:
220 237 # maybe we have a repo_name? Try to figure out repo_id from the repo_name
221 238 if repository_name:
222 239 repository_id = getattr(
223 240 Repository.get_by_repo_name(repository_name), 'repo_id', None)
224 241
225 242 user_log = _store_log(
226 243 action_name=safe_unicode(action),
227 244 action_data=action_data or {},
228 245 user_id=user_id,
229 246 username=username,
230 247 user_data=user_data or {},
231 248 ip_address=safe_unicode(ip_addr),
232 249 repository_id=repository_id,
233 250 repository_name=repository_name
234 251 )
235 252 sa_session.add(user_log)
236 253 if commit:
237 254 sa_session.commit()
238 255
239 256 except Exception:
240 257 log.exception('AUDIT: failed to store audit log')
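
The new `repo.pull_request.*` keys in ACTIONS_V1 are consumed through the same `store()`/`store_web()` helpers defined above. A usage sketch for one of them, using the wrapper objects from this module (the payload shown is illustrative; the real call sites live in the controllers and models)::

    from rhodecode.lib import audit_logger

    # e.g. recording that a pull request was closed through the web UI
    audit_logger.store_web(
        action='repo.pull_request.close',
        action_data={'pull_request_id': 42},  # illustrative payload
        user=audit_logger.UserWrap(username='some-user', ip_addr='127.0.0.1'),
        repo=audit_logger.RepoWrap(repo_name='some-repo'))
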
@@ -1,1044 +1,982 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Utilities library for RhodeCode
23 23 """
24 24
25 25 import datetime
26 26 import decorator
27 27 import json
28 28 import logging
29 29 import os
30 30 import re
31 31 import shutil
32 32 import tempfile
33 33 import traceback
34 34 import tarfile
35 35 import warnings
36 36 import hashlib
37 37 from os.path import join as jn
38 38
39 39 import paste
40 40 import pkg_resources
41 41 from paste.script.command import Command, BadCommand
42 42 from webhelpers.text import collapse, remove_formatting, strip_tags
43 43 from mako import exceptions
44 44 from pyramid.threadlocal import get_current_registry
45 45 from pyramid.request import Request
46 46
47 47 from rhodecode.lib.fakemod import create_module
48 48 from rhodecode.lib.vcs.backends.base import Config
49 49 from rhodecode.lib.vcs.exceptions import VCSError
50 50 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
51 51 from rhodecode.lib.utils2 import (
52 52 safe_str, safe_unicode, get_current_rhodecode_user, md5)
53 53 from rhodecode.model import meta
54 54 from rhodecode.model.db import (
55 55 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
56 56 from rhodecode.model.meta import Session
57 57
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
62 62
63 63 # String which contains characters that are not allowed in slug names for
64 64 # repositories or repository groups. It is properly escaped to use it in
65 65 # regular expressions.
66 66 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
67 67
68 68 # Regex that matches forbidden characters in repo/group slugs.
69 69 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
70 70
71 71 # Regex that matches allowed characters in repo/group slugs.
72 72 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
73 73
74 74 # Regex that matches whole repo/group slugs.
75 75 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
76 76
77 77 _license_cache = None
78 78
79 79
80 80 def repo_name_slug(value):
81 81 """
82 82 Return a slug of the repository name.
83 83 This function is called on each creation/modification
84 84 of a repository to prevent bad names in the repo
85 85 """
86 86 replacement_char = '-'
87 87
88 88 slug = remove_formatting(value)
89 89 slug = SLUG_BAD_CHAR_RE.sub('', slug)
90 90 slug = re.sub('[\s]+', '-', slug)
91 91 slug = collapse(slug, replacement_char)
92 92 return slug
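
A quick doctest-style illustration of the slugging above, as if evaluated in this module (the exact output ultimately depends on webhelpers' `remove_formatting`/`collapse` defaults)::

    slug = repo_name_slug('My Repo!!')
    # expected: 'My-Repo' -- bad characters stripped, whitespace turned
    # into '-', repeated separators collapsed
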
93 93
94 94
95 95 #==============================================================================
96 96 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
97 97 #==============================================================================
98 98 def get_repo_slug(request):
99 99 if isinstance(request, Request) and getattr(request, 'db_repo', None):
100 100 # pyramid
101 101 _repo = request.db_repo.repo_name
102 102 else:
103 103 # TODO(marcink): remove after pylons migration...
104 104 _repo = request.environ['pylons.routes_dict'].get('repo_name')
105 105
106 106 if _repo:
107 107 _repo = _repo.rstrip('/')
108 108 return _repo
109 109
110 110
111 111 def get_repo_group_slug(request):
112 112 if isinstance(request, Request) and getattr(request, 'matchdict', None):
113 113 # pyramid
114 114 _group = request.matchdict.get('repo_group_name')
115 115 else:
116 116 _group = request.environ['pylons.routes_dict'].get('group_name')
117 117
118 118 if _group:
119 119 _group = _group.rstrip('/')
120 120 return _group
121 121
122 122
123 123 def get_user_group_slug(request):
124 124 if isinstance(request, Request) and getattr(request, 'matchdict', None):
125 125 # pyramid
126 126 _group = request.matchdict.get('user_group_id')
127 127 else:
128 128 _group = request.environ['pylons.routes_dict'].get('user_group_id')
129 129
130 130 try:
131 131 _group = UserGroup.get(_group)
132 132 if _group:
133 133 _group = _group.users_group_name
134 134 except Exception:
135 135 log.debug(traceback.format_exc())
136 136 # catch all failures here
137 137 pass
138 138
139 139 return _group
140 140
141 141
142 def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
143 """
144 Action logger for various actions made by users
145
146 :param user: user that made this action, can be a unique username string or
147 object containing user_id attribute
148 :param action: action to log, should be one of the predefined unique actions for
149 easy translations
150 :param repo: string name of repository or object containing repo_id,
151 that action was made on
152 :param ipaddr: optional ip address from what the action was made
153 :param sa: optional sqlalchemy session
154
155 """
156
157 if not sa:
158 sa = meta.Session()
159 # if we don't get explicit IP address try to get one from registered user
160 # in tmpl context var
161 if not ipaddr:
162 ipaddr = getattr(get_current_rhodecode_user(), 'ip_addr', '')
163
164 try:
165 if getattr(user, 'user_id', None):
166 user_obj = User.get(user.user_id)
167 elif isinstance(user, basestring):
168 user_obj = User.get_by_username(user)
169 else:
170 raise Exception('You have to provide a user object or a username')
171
172 if getattr(repo, 'repo_id', None):
173 repo_obj = Repository.get(repo.repo_id)
174 repo_name = repo_obj.repo_name
175 elif isinstance(repo, basestring):
176 repo_name = repo.lstrip('/')
177 repo_obj = Repository.get_by_repo_name(repo_name)
178 else:
179 repo_obj = None
180 repo_name = ''
181
182 user_log = UserLog()
183 user_log.user_id = user_obj.user_id
184 user_log.username = user_obj.username
185 action = safe_unicode(action)
186 user_log.action = action[:1200000]
187
188 user_log.repository = repo_obj
189 user_log.repository_name = repo_name
190
191 user_log.action_date = datetime.datetime.now()
192 user_log.user_ip = ipaddr
193 sa.add(user_log)
194
195 log.info('Logging action:`%s` on repo:`%s` by user:%s ip:%s',
196 action, safe_unicode(repo), user_obj, ipaddr)
197 if commit:
198 sa.commit()
199 except Exception:
200 log.error(traceback.format_exc())
201 raise
202
203
204 142 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
205 143 """
206 144 Scans given path for repos and return (name,(type,path)) tuple
207 145
208 146 :param path: path to scan for repositories
209 147 :param recursive: recursive search and return names with subdirs in front
210 148 """
211 149
212 150 # remove ending slash for better results
213 151 path = path.rstrip(os.sep)
214 152 log.debug('now scanning in %s location recursive:%s...', path, recursive)
215 153
216 154 def _get_repos(p):
217 155 dirpaths = _get_dirpaths(p)
218 156 if not _is_dir_writable(p):
219 157 log.warning('repo path without write access: %s', p)
220 158
221 159 for dirpath in dirpaths:
222 160 if os.path.isfile(os.path.join(p, dirpath)):
223 161 continue
224 162 cur_path = os.path.join(p, dirpath)
225 163
226 164 # skip removed repos
227 165 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
228 166 continue
229 167
230 168 # skip .<something> dirs
231 169 if dirpath.startswith('.'):
232 170 continue
233 171
234 172 try:
235 173 scm_info = get_scm(cur_path)
236 174 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
237 175 except VCSError:
238 176 if not recursive:
239 177 continue
240 178 # check if this dir contains other repos for recursive scan
241 179 rec_path = os.path.join(p, dirpath)
242 180 if os.path.isdir(rec_path):
243 181 for inner_scm in _get_repos(rec_path):
244 182 yield inner_scm
245 183
246 184 return _get_repos(path)
247 185
248 186
249 187 def _get_dirpaths(p):
250 188 try:
251 189 # OS-independent way of checking if we have at least read-only
252 190 # access or not.
253 191 dirpaths = os.listdir(p)
254 192 except OSError:
255 193 log.warning('ignoring repo path without read access: %s', p)
256 194 return []
257 195
258 196 # os.listdir has a quirk: if a unicode path is passed into it, then it tries to
259 197 # decode paths and suddenly returns unicode objects itself. The items it
260 198 # cannot decode are returned as strings and cause issues.
261 199 #
262 200 # Those paths are ignored here until a solid solution for path handling has
263 201 # been built.
264 202 expected_type = type(p)
265 203
266 204 def _has_correct_type(item):
267 205 if type(item) is not expected_type:
268 206 log.error(
269 207 u"Ignoring path %s since it cannot be decoded into unicode.",
270 208 # Using "repr" to make sure that we see the byte value in case
271 209 # of a support request.
272 210 repr(item))
273 211 return False
274 212 return True
275 213
276 214 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
277 215
278 216 return dirpaths
279 217
280 218
281 219 def _is_dir_writable(path):
282 220 """
283 221 Probe if `path` is writable.
284 222
285 223 Due to trouble on Cygwin / Windows, this is actually probing if it is
286 224 possible to create a file inside of `path`, stat does not produce reliable
287 225 results in this case.
288 226 """
289 227 try:
290 228 with tempfile.TemporaryFile(dir=path):
291 229 pass
292 230 except OSError:
293 231 return False
294 232 return True
295 233
296 234
297 235 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None):
298 236 """
299 237 Returns True if given path is a valid repository False otherwise.
300 238 If expect_scm param is given also, compare if given scm is the same
301 239 as expected from scm parameter. If explicit_scm is given don't try to
302 240 detect the scm, just use the given one to check if repo is valid
303 241
304 242 :param repo_name:
305 243 :param base_path:
306 244 :param expect_scm:
307 245 :param explicit_scm:
308 246
309 247 :return True: if given path is a valid repository
310 248 """
311 249 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
312 250 log.debug('Checking if `%s` is a valid path for repository. '
313 251 'Explicit type: %s', repo_name, explicit_scm)
314 252
315 253 try:
316 254 if explicit_scm:
317 255 detected_scms = [get_scm_backend(explicit_scm)]
318 256 else:
319 257 detected_scms = get_scm(full_path)
320 258
321 259 if expect_scm:
322 260 return detected_scms[0] == expect_scm
323 261 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
324 262 return True
325 263 except VCSError:
326 264 log.debug('path: %s is not a valid repo !', full_path)
327 265 return False
328 266
329 267
330 268 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
331 269 """
332 270 Returns True if given path is a repository group, False otherwise
333 271
334 272 :param repo_group_name:
335 273 :param base_path:
336 274 """
337 275 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
338 276 log.debug('Checking if `%s` is a valid path for repository group',
339 277 repo_group_name)
340 278
341 279 # check if it's not a repo
342 280 if is_valid_repo(repo_group_name, base_path):
343 281 log.debug('Repo called %s exists, it is not a valid '
344 282 'repo group' % repo_group_name)
345 283 return False
346 284
347 285 try:
348 286 # we need to check bare git repos at higher level
349 287 # since we might match branches/hooks/info/objects or possible
350 288 # other things inside bare git repo
351 289 scm_ = get_scm(os.path.dirname(full_path))
352 290 log.debug('path: %s is a vcs object:%s, not a valid '
353 291 'repo group' % (full_path, scm_))
354 292 return False
355 293 except VCSError:
356 294 pass
357 295
358 296 # check if it's a valid path
359 297 if skip_path_check or os.path.isdir(full_path):
360 298 log.debug('path: %s is a valid repo group !', full_path)
361 299 return True
362 300
363 301 log.debug('path: %s is not a valid repo group !', full_path)
364 302 return False
365 303
366 304
367 305 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
368 306 while True:
369 307 ok = raw_input(prompt)
370 308 if ok.lower() in ('y', 'ye', 'yes'):
371 309 return True
372 310 if ok.lower() in ('n', 'no', 'nop', 'nope'):
373 311 return False
374 312 retries = retries - 1
375 313 if retries < 0:
376 314 raise IOError
377 315 print(complaint)
378 316
379 317 # taken from the mercurial documentation
380 318 ui_sections = [
381 319 'alias', 'auth',
382 320 'decode/encode', 'defaults',
383 321 'diff', 'email',
384 322 'extensions', 'format',
385 323 'merge-patterns', 'merge-tools',
386 324 'hooks', 'http_proxy',
387 325 'smtp', 'patch',
388 326 'paths', 'profiling',
389 327 'server', 'trusted',
390 328 'ui', 'web', ]
391 329
392 330
393 331 def config_data_from_db(clear_session=True, repo=None):
394 332 """
395 333 Read the configuration data from the database and return configuration
396 334 tuples.
397 335 """
398 336 from rhodecode.model.settings import VcsSettingsModel
399 337
400 338 config = []
401 339
402 340 sa = meta.Session()
403 341 settings_model = VcsSettingsModel(repo=repo, sa=sa)
404 342
405 343 ui_settings = settings_model.get_ui_settings()
406 344
407 345 for setting in ui_settings:
408 346 if setting.active:
409 347 log.debug(
410 348 'settings ui from db: [%s] %s=%s',
411 349 setting.section, setting.key, setting.value)
412 350 config.append((
413 351 safe_str(setting.section), safe_str(setting.key),
414 352 safe_str(setting.value)))
415 353 if setting.key == 'push_ssl':
416 354 # force set push_ssl requirement to False, rhodecode
417 355 # handles that
418 356 config.append((
419 357 safe_str(setting.section), safe_str(setting.key), False))
420 358 if clear_session:
421 359 meta.Session.remove()
422 360
423 361 # TODO: mikhail: probably it makes no sense to re-read hooks information.
424 362 # It's already there and activated/deactivated
425 363 skip_entries = []
426 364 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
427 365 if 'pull' not in enabled_hook_classes:
428 366 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
429 367 if 'push' not in enabled_hook_classes:
430 368 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
431 369 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
432 370 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
433 371
434 372 config = [entry for entry in config if entry[:2] not in skip_entries]
435 373
436 374 return config
437 375
438 376
439 377 def make_db_config(clear_session=True, repo=None):
440 378 """
441 379 Create a :class:`Config` instance based on the values in the database.
442 380 """
443 381 config = Config()
444 382 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
445 383 for section, option, value in config_data:
446 384 config.set(section, option, value)
447 385 return config
448 386
449 387
450 388 def get_enabled_hook_classes(ui_settings):
451 389 """
452 390 Return the enabled hook classes.
453 391
454 392 :param ui_settings: List of ui_settings as returned
455 393 by :meth:`VcsSettingsModel.get_ui_settings`
456 394
457 395 :return: a list with the enabled hook classes. The order is not guaranteed.
458 396 :rtype: list
459 397 """
460 398 enabled_hooks = []
461 399 active_hook_keys = [
462 400 key for section, key, value, active in ui_settings
463 401 if section == 'hooks' and active]
464 402
465 403 hook_names = {
466 404 RhodeCodeUi.HOOK_PUSH: 'push',
467 405 RhodeCodeUi.HOOK_PULL: 'pull',
468 406 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
469 407 }
470 408
471 409 for key in active_hook_keys:
472 410 hook = hook_names.get(key)
473 411 if hook:
474 412 enabled_hooks.append(hook)
475 413
476 414 return enabled_hooks
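
As the unpacking in the list comprehension above shows, `ui_settings` is iterated as (section, key, value, active) tuples. A small illustrative call, as if evaluated in this module (the hook values are placeholders)::

    example_ui_settings = [
        ('hooks', RhodeCodeUi.HOOK_PUSH, 'python:...', True),
        ('hooks', RhodeCodeUi.HOOK_PULL, 'python:...', False),
        ('hooks', RhodeCodeUi.HOOK_REPO_SIZE, 'python:...', True),
    ]
    get_enabled_hook_classes(example_ui_settings)
    # -> ['push', 'repo_size'] (order not guaranteed)
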
477 415
478 416
479 417 def set_rhodecode_config(config):
480 418 """
481 419 Updates pylons config with new settings from database
482 420
483 421 :param config:
484 422 """
485 423 from rhodecode.model.settings import SettingsModel
486 424 app_settings = SettingsModel().get_all_settings()
487 425
488 426 for k, v in app_settings.items():
489 427 config[k] = v
490 428
491 429
492 430 def get_rhodecode_realm():
493 431 """
494 432 Return the rhodecode realm from database.
495 433 """
496 434 from rhodecode.model.settings import SettingsModel
497 435 realm = SettingsModel().get_setting_by_name('realm')
498 436 return safe_str(realm.app_settings_value)
499 437
500 438
501 439 def get_rhodecode_base_path():
502 440 """
503 441 Returns the base path. The base path is the filesystem path which points
504 442 to the repository store.
505 443 """
506 444 from rhodecode.model.settings import SettingsModel
507 445 paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
508 446 return safe_str(paths_ui.ui_value)
509 447
510 448
511 449 def map_groups(path):
512 450 """
513 451 Given a full path to a repository, create all nested groups that this
514 452 repo is inside. This function creates parent-child relationships between
515 453 groups and creates default perms for all new groups.
516 454
517 455 :param path: full path to the repository
518 456 """
519 457 from rhodecode.model.repo_group import RepoGroupModel
520 458 sa = meta.Session()
521 459 groups = path.split(Repository.NAME_SEP)
522 460 parent = None
523 461 group = None
524 462
525 463 # last element is repo in nested groups structure
526 464 groups = groups[:-1]
527 465 rgm = RepoGroupModel(sa)
528 466 owner = User.get_first_super_admin()
529 467 for lvl, group_name in enumerate(groups):
530 468 group_name = '/'.join(groups[:lvl] + [group_name])
531 469 group = RepoGroup.get_by_group_name(group_name)
532 470 desc = '%s group' % group_name
533 471
534 472 # skip folders that are now removed repos
535 473 if REMOVED_REPO_PAT.match(group_name):
536 474 break
537 475
538 476 if group is None:
539 477 log.debug('creating group level: %s group_name: %s',
540 478 lvl, group_name)
541 479 group = RepoGroup(group_name, parent)
542 480 group.group_description = desc
543 481 group.user = owner
544 482 sa.add(group)
545 483 perm_obj = rgm._create_default_perms(group)
546 484 sa.add(perm_obj)
547 485 sa.flush()
548 486
549 487 parent = group
550 488 return group
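
A short usage sketch for `map_groups` with a hypothetical nested path, as if evaluated in this module (requires an initialized database session)::

    group = map_groups('projects/backend/api-repo')
    # ensures RepoGroup rows 'projects' and 'projects/backend' exist (with
    # default perms) and returns the deepest one; the trailing 'api-repo'
    # segment is the repository itself and is skipped
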
551 489
552 490
553 491 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
554 492 """
555 493 Maps all repos given in initial_repo_list; non-existing repositories
556 494 are created. If remove_obsolete is True it also checks for db entries
557 495 that are not in initial_repo_list and removes them.
558 496
559 497 :param initial_repo_list: list of repositories found by scanning methods
560 498 :param remove_obsolete: check for obsolete entries in database
561 499 """
562 500 from rhodecode.model.repo import RepoModel
563 501 from rhodecode.model.scm import ScmModel
564 502 from rhodecode.model.repo_group import RepoGroupModel
565 503 from rhodecode.model.settings import SettingsModel
566 504
567 505 sa = meta.Session()
568 506 repo_model = RepoModel()
569 507 user = User.get_first_super_admin()
570 508 added = []
571 509
572 510 # creation defaults
573 511 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
574 512 enable_statistics = defs.get('repo_enable_statistics')
575 513 enable_locking = defs.get('repo_enable_locking')
576 514 enable_downloads = defs.get('repo_enable_downloads')
577 515 private = defs.get('repo_private')
578 516
579 517 for name, repo in initial_repo_list.items():
580 518 group = map_groups(name)
581 519 unicode_name = safe_unicode(name)
582 520 db_repo = repo_model.get_by_repo_name(unicode_name)
583 521 # found repo that is on filesystem not in RhodeCode database
584 522 if not db_repo:
585 523 log.info('repository %s not found, creating now', name)
586 524 added.append(name)
587 525 desc = (repo.description
588 526 if repo.description != 'unknown'
589 527 else '%s repository' % name)
590 528
591 529 db_repo = repo_model._create_repo(
592 530 repo_name=name,
593 531 repo_type=repo.alias,
594 532 description=desc,
595 533 repo_group=getattr(group, 'group_id', None),
596 534 owner=user,
597 535 enable_locking=enable_locking,
598 536 enable_downloads=enable_downloads,
599 537 enable_statistics=enable_statistics,
600 538 private=private,
601 539 state=Repository.STATE_CREATED
602 540 )
603 541 sa.commit()
604 542 # we just added that repo, so make sure we update the server info
605 543 if db_repo.repo_type == 'git':
606 544 git_repo = db_repo.scm_instance()
607 545 # update repository server-info
608 546 log.debug('Running update server info')
609 547 git_repo._update_server_info()
610 548
611 549 db_repo.update_commit_cache()
612 550
613 551 config = db_repo._config
614 552 config.set('extensions', 'largefiles', '')
615 553 ScmModel().install_hooks(
616 554 db_repo.scm_instance(config=config),
617 555 repo_type=db_repo.repo_type)
618 556
619 557 removed = []
620 558 if remove_obsolete:
621 559 # remove from database those repositories that are not in the filesystem
622 560 for repo in sa.query(Repository).all():
623 561 if repo.repo_name not in initial_repo_list.keys():
624 562 log.debug("Removing non-existing repository found in db `%s`",
625 563 repo.repo_name)
626 564 try:
627 565 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
628 566 sa.commit()
629 567 removed.append(repo.repo_name)
630 568 except Exception:
631 569 # don't hold further removals on error
632 570 log.error(traceback.format_exc())
633 571 sa.rollback()
634 572
635 573 def splitter(full_repo_name):
636 574 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
637 575 gr_name = None
638 576 if len(_parts) == 2:
639 577 gr_name = _parts[0]
640 578 return gr_name
641 579
642 580 initial_repo_group_list = [splitter(x) for x in
643 581 initial_repo_list.keys() if splitter(x)]
644 582
645 583 # remove from database those repository groups that are not in the
646 584 # filesystem due to parent child relationships we need to delete them
647 585 # in a specific order of most nested first
648 586 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
649 587 nested_sort = lambda gr: len(gr.split('/'))
650 588 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
651 589 if group_name not in initial_repo_group_list:
652 590 repo_group = RepoGroup.get_by_group_name(group_name)
653 591 if (repo_group.children.all() or
654 592 not RepoGroupModel().check_exist_filesystem(
655 593 group_name=group_name, exc_on_failure=False)):
656 594 continue
657 595
658 596 log.info(
659 597 'Removing non-existing repository group found in db `%s`',
660 598 group_name)
661 599 try:
662 600 RepoGroupModel(sa).delete(group_name, fs_remove=False)
663 601 sa.commit()
664 602 removed.append(group_name)
665 603 except Exception:
666 604 # don't hold further removals on error
667 605 log.exception(
668 606 'Unable to remove repository group `%s`',
669 607 group_name)
670 608 sa.rollback()
671 609 raise
672 610
673 611 return added, removed
674 612
675 613
676 614 def get_default_cache_settings(settings):
677 615 cache_settings = {}
678 616 for key in settings.keys():
679 617 for prefix in ['beaker.cache.', 'cache.']:
680 618 if key.startswith(prefix):
681 619 name = key.split(prefix)[1].strip()
682 620 cache_settings[name] = settings[key].strip()
683 621 return cache_settings
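
A quick illustration of the key rewriting performed above, using hypothetical .ini values, as if evaluated in this module::

    example_settings = {
        'beaker.cache.regions': 'long_term, sql_cache_short',
        'beaker.cache.long_term.expire': '36000',
        'cache.sql_cache_short.expire': '30',
    }
    get_default_cache_settings(example_settings)
    # -> {'regions': 'long_term, sql_cache_short',
    #     'long_term.expire': '36000',
    #     'sql_cache_short.expire': '30'}
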
684 622
685 623
686 624 # set cache regions for beaker so celery can utilise it
687 625 def add_cache(settings):
688 626 from rhodecode.lib import caches
689 627 cache_settings = {'regions': None}
690 628 # main cache settings used as default ...
691 629 cache_settings.update(get_default_cache_settings(settings))
692 630
693 631 if cache_settings['regions']:
694 632 for region in cache_settings['regions'].split(','):
695 633 region = region.strip()
696 634 region_settings = {}
697 635 for key, value in cache_settings.items():
698 636 if key.startswith(region):
699 637 region_settings[key.split('.')[1]] = value
700 638
701 639 caches.configure_cache_region(
702 640 region, region_settings, cache_settings)
703 641
704 642
705 643 def load_rcextensions(root_path):
706 644 import rhodecode
707 645 from rhodecode.config import conf
708 646
709 647 path = os.path.join(root_path, 'rcextensions', '__init__.py')
710 648 if os.path.isfile(path):
711 649 rcext = create_module('rc', path)
712 650 EXT = rhodecode.EXTENSIONS = rcext
713 651 log.debug('Found rcextensions now loading %s...', rcext)
714 652
715 653 # Additional mappings that are not present in the pygments lexers
716 654 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
717 655
718 656 # auto check if the module is not missing any data; set to defaults if it is.
719 657 # this will help auto-update new features of the rcext module
720 658 #from rhodecode.config import rcextensions
721 659 #for k in dir(rcextensions):
722 660 # if not k.startswith('_') and not hasattr(EXT, k):
723 661 # setattr(EXT, k, getattr(rcextensions, k))
724 662
725 663
726 664 def get_custom_lexer(extension):
727 665 """
728 666 returns a custom lexer if it is defined in rcextensions module, or None
729 667 if there's no custom lexer defined
730 668 """
731 669 import rhodecode
732 670 from pygments import lexers
733 671
734 672 # custom override made by RhodeCode
735 673 if extension in ['mako']:
736 674 return lexers.get_lexer_by_name('html+mako')
737 675
738 676 # check if we didn't define this extension as other lexer
739 677 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
740 678 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
741 679 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
742 680 return lexers.get_lexer_by_name(_lexer_name)
743 681
744 682
745 683 #==============================================================================
746 684 # TEST FUNCTIONS AND CREATORS
747 685 #==============================================================================
748 686 def create_test_index(repo_location, config):
749 687 """
750 688 Makes default test index.
751 689 """
752 690 import rc_testdata
753 691
754 692 rc_testdata.extract_search_index(
755 693 'vcs_search_index', os.path.dirname(config['search.location']))
756 694
757 695
758 696 def create_test_directory(test_path):
759 697 """
760 698 Create test directory if it doesn't exist.
761 699 """
762 700 if not os.path.isdir(test_path):
763 701 log.debug('Creating testdir %s', test_path)
764 702 os.makedirs(test_path)
765 703
766 704
767 705 def create_test_database(test_path, config):
768 706 """
769 707 Makes a fresh database.
770 708 """
771 709 from rhodecode.lib.db_manage import DbManage
772 710
773 711 # PART ONE create db
774 712 dbconf = config['sqlalchemy.db1.url']
775 713 log.debug('making test db %s', dbconf)
776 714
777 715 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
778 716 tests=True, cli_args={'force_ask': True})
779 717 dbmanage.create_tables(override=True)
780 718 dbmanage.set_db_version()
781 719 # for tests dynamically set new root paths based on generated content
782 720 dbmanage.create_settings(dbmanage.config_prompt(test_path))
783 721 dbmanage.create_default_user()
784 722 dbmanage.create_test_admin_and_users()
785 723 dbmanage.create_permissions()
786 724 dbmanage.populate_default_permissions()
787 725 Session().commit()
788 726
789 727
790 728 def create_test_repositories(test_path, config):
791 729 """
792 730 Creates test repositories in the temporary directory. Repositories are
793 731 extracted from archives within the rc_testdata package.
794 732 """
795 733 import rc_testdata
796 734 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
797 735
798 736 log.debug('making test vcs repositories')
799 737
800 738 idx_path = config['search.location']
801 739 data_path = config['cache_dir']
802 740
803 741 # clean index and data
804 742 if idx_path and os.path.exists(idx_path):
805 743 log.debug('remove %s', idx_path)
806 744 shutil.rmtree(idx_path)
807 745
808 746 if data_path and os.path.exists(data_path):
809 747 log.debug('remove %s', data_path)
810 748 shutil.rmtree(data_path)
811 749
812 750 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
813 751 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
814 752
815 753 # Note: Subversion is in the process of being integrated with the system;
816 754 # until we have a properly packed version of the test svn repository, this
817 755 # copies the repo over from the "rc_testdata" package
818 756 svn_repo_path = rc_testdata.get_svn_repo_archive()
819 757 with tarfile.open(svn_repo_path) as tar:
820 758 tar.extractall(jn(test_path, SVN_REPO))
821 759
822 760
823 761 #==============================================================================
824 762 # PASTER COMMANDS
825 763 #==============================================================================
826 764 class BasePasterCommand(Command):
827 765 """
828 766 Abstract Base Class for paster commands.
829 767
830 768 The celery commands are somewhat aggressive about loading
831 769 celery.conf, and since our module sets the `CELERY_LOADER`
832 770 environment variable to our loader, we have to bootstrap a bit and
833 771 make sure we've had a chance to load the pylons config off of the
834 772 command line, otherwise everything fails.
835 773 """
836 774 min_args = 1
837 775 min_args_error = "Please provide a paster config file as an argument."
838 776 takes_config_file = 1
839 777 requires_config_file = True
840 778
841 779 def notify_msg(self, msg, log=False):
842 780 """Make a notification to user, additionally if logger is passed
843 781 it logs this action using given logger
844 782
845 783 :param msg: message that will be printed to user
846 784 :param log: logging instance, to use to additionally log this message
847 785
848 786 """
849 787 if log and isinstance(log, logging):
850 788 log(msg)
851 789
852 790 def run(self, args):
853 791 """
854 792 Overrides Command.run
855 793
856 794 Checks for a config file argument and loads it.
857 795 """
858 796 if len(args) < self.min_args:
859 797 raise BadCommand(
860 798 self.min_args_error % {'min_args': self.min_args,
861 799 'actual_args': len(args)})
862 800
863 801 # Decrement because we're going to lob off the first argument.
864 802 # @@ This is hacky
865 803 self.min_args -= 1
866 804 self.bootstrap_config(args[0])
867 805 self.update_parser()
868 806 return super(BasePasterCommand, self).run(args[1:])
869 807
870 808 def update_parser(self):
871 809 """
872 810 Abstract method. Allows for the class' parser to be updated
873 811 before the superclass' `run` method is called. Necessary to
874 812 allow options/arguments to be passed through to the underlying
875 813 celery command.
876 814 """
877 815 raise NotImplementedError("Abstract Method.")
878 816
879 817 def bootstrap_config(self, conf):
880 818 """
881 819 Loads the pylons configuration.
882 820 """
883 821 from pylons import config as pylonsconfig
884 822
885 823 self.path_to_ini_file = os.path.realpath(conf)
886 824 conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
887 825 pylonsconfig.init_app(conf.global_conf, conf.local_conf)
888 826
889 827 def _init_session(self):
890 828 """
891 829 Inits SqlAlchemy Session
892 830 """
893 831 logging.config.fileConfig(self.path_to_ini_file)
894 832 from pylons import config
895 833 from rhodecode.config.utils import initialize_database
896 834
897 835 # get to remove repos !!
898 836 add_cache(config)
899 837 initialize_database(config)
900 838
901 839
902 840 @decorator.decorator
903 841 def jsonify(func, *args, **kwargs):
904 842 """Action decorator that formats output for JSON
905 843
906 844 Given a function that will return content, this decorator will turn
907 845 the result into JSON, with a content-type of 'application/json' and
908 846 output it.
909 847
910 848 """
911 849 from pylons.decorators.util import get_pylons
912 850 from rhodecode.lib.ext_json import json
913 851 pylons = get_pylons(args)
914 852 pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
915 853 data = func(*args, **kwargs)
916 854 if isinstance(data, (list, tuple)):
917 855 msg = "JSON responses with Array envelopes are susceptible to " \
918 856 "cross-site data leak attacks, see " \
919 857 "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
920 858 warnings.warn(msg, Warning, 2)
921 859 log.warning(msg)
922 860 log.debug("Returning JSON wrapped action output")
923 861 return json.dumps(data, encoding='utf-8')
924 862
925 863
926 864 class PartialRenderer(object):
927 865 """
928 866 Partial renderer used to render chunks of html used in datagrids
929 867 use like::
930 868
931 869 _render = PartialRenderer('data_table/_dt_elements.mako')
932 870 _render('quick_menu', args, kwargs)
933 871 PartialRenderer.h,
934 872 c,
935 873 _,
936 874 ungettext
937 875 are the template helpers initialized inside and can be re-used later
938 876
939 877 :param tmpl_name: template path relate to /templates/ dir
940 878 """
941 879
942 880 def __init__(self, tmpl_name):
943 881 import rhodecode
944 882 from pylons import request, tmpl_context as c
945 883 from pylons.i18n.translation import _, ungettext
946 884 from rhodecode.lib import helpers as h
947 885
948 886 self.tmpl_name = tmpl_name
949 887 self.rhodecode = rhodecode
950 888 self.c = c
951 889 self._ = _
952 890 self.ungettext = ungettext
953 891 self.h = h
954 892 self.request = request
955 893
956 894 def _mako_lookup(self):
957 895 _tmpl_lookup = self.rhodecode.CONFIG['pylons.app_globals'].mako_lookup
958 896 return _tmpl_lookup.get_template(self.tmpl_name)
959 897
960 898 def _update_kwargs_for_render(self, kwargs):
961 899 """
962 900 Inject params required for Mako rendering
963 901 """
964 902 _kwargs = {
965 903 '_': self._,
966 904 'h': self.h,
967 905 'c': self.c,
968 906 'request': self.request,
969 907 'ungettext': self.ungettext,
970 908 }
971 909 _kwargs.update(kwargs)
972 910 return _kwargs
973 911
974 912 def _render_with_exc(self, render_func, args, kwargs):
975 913 try:
976 914 return render_func.render(*args, **kwargs)
977 915         except Exception:
978 916 log.error(exceptions.text_error_template().render())
979 917 raise
980 918
981 919 def _get_template(self, template_obj, def_name):
982 920 if def_name:
983 921 tmpl = template_obj.get_def(def_name)
984 922 else:
985 923 tmpl = template_obj
986 924 return tmpl
987 925
988 926 def render(self, def_name, *args, **kwargs):
989 927 lookup_obj = self._mako_lookup()
990 928 tmpl = self._get_template(lookup_obj, def_name=def_name)
991 929 kwargs = self._update_kwargs_for_render(kwargs)
992 930 return self._render_with_exc(tmpl, args, kwargs)
993 931
994 932 def __call__(self, tmpl, *args, **kwargs):
995 933 return self.render(tmpl, *args, **kwargs)
996 934
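A short, illustrative call of the renderer above; the def name matches the docstring example, while the argument is an assumption.

# illustrative only: the rendered def and its argument are made up
_render = PartialRenderer('data_table/_dt_elements.mako')
quick_menu_html = _render('quick_menu', 'some-repo')
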
997 935
998 936 def password_changed(auth_user, session):
999 937 # Never report password change in case of default user or anonymous user.
1000 938 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
1001 939 return False
1002 940
1003 941 password_hash = md5(auth_user.password) if auth_user.password else None
1004 942 rhodecode_user = session.get('rhodecode_user', {})
1005 943 session_password_hash = rhodecode_user.get('password', '')
1006 944 return password_hash != session_password_hash
1007 945
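A tiny sketch of the comparison `password_changed` performs, with made-up values; the session layout (`rhodecode_user` -> `password`) mirrors the lookup above.

import hashlib

# made-up hashes: the session keeps the md5 of the password seen at login
session = {'rhodecode_user': {'password': hashlib.md5('old-secret').hexdigest()}}
current_hash = hashlib.md5('new-secret').hexdigest()
# a mismatch means password_changed() reports True and the session can be dropped
assert current_hash != session['rhodecode_user'].get('password', '')
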
1008 946
1009 947 def read_opensource_licenses():
1010 948 global _license_cache
1011 949
1012 950 if not _license_cache:
1013 951 licenses = pkg_resources.resource_string(
1014 952 'rhodecode', 'config/licenses.json')
1015 953 _license_cache = json.loads(licenses)
1016 954
1017 955 return _license_cache
1018 956
1019 957
1020 958 def get_registry(request):
1021 959 """
1022 960 Utility to get the pyramid registry from a request. During migration to
1023 961 pyramid we sometimes want to use the pyramid registry from pylons context.
1024 962 Therefore this utility returns `request.registry` for pyramid requests and
1025 963 uses `get_current_registry()` for pylons requests.
1026 964 """
1027 965 try:
1028 966 return request.registry
1029 967 except AttributeError:
1030 968 return get_current_registry()
1031 969
1032 970
1033 971 def generate_platform_uuid():
1034 972 """
1035 973     Generates a platform UUID based on its name
1036 974 """
1037 975 import platform
1038 976
1039 977 try:
1040 978 uuid_list = [platform.platform()]
1041 979 return hashlib.sha256(':'.join(uuid_list)).hexdigest()
1042 980 except Exception as e:
1043 981 log.error('Failed to generate host uuid: %s' % e)
1044 982 return 'UNDEFINED'
@@ -1,650 +1,666 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 comments model for RhodeCode
23 23 """
24 24
25 25 import logging
26 26 import traceback
27 27 import collections
28 28
29 29 from datetime import datetime
30 30
31 31 from pylons.i18n.translation import _
32 32 from pyramid.threadlocal import get_current_registry, get_current_request
33 33 from sqlalchemy.sql.expression import null
34 34 from sqlalchemy.sql.functions import coalesce
35 35
36 36 from rhodecode.lib import helpers as h, diffs
37 from rhodecode.lib import audit_logger
37 38 from rhodecode.lib.channelstream import channelstream_request
38 from rhodecode.lib.utils import action_logger
39 39 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str
40 40 from rhodecode.model import BaseModel
41 41 from rhodecode.model.db import (
42 42 ChangesetComment, User, Notification, PullRequest, AttributeDict)
43 43 from rhodecode.model.notification import NotificationModel
44 44 from rhodecode.model.meta import Session
45 45 from rhodecode.model.settings import VcsSettingsModel
46 46 from rhodecode.model.notification import EmailNotificationModel
47 47 from rhodecode.model.validation_schema.schemas import comment_schema
48 48
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class CommentsModel(BaseModel):
54 54
55 55 cls = ChangesetComment
56 56
57 57 DIFF_CONTEXT_BEFORE = 3
58 58 DIFF_CONTEXT_AFTER = 3
59 59
60 60 def __get_commit_comment(self, changeset_comment):
61 61 return self._get_instance(ChangesetComment, changeset_comment)
62 62
63 63 def __get_pull_request(self, pull_request):
64 64 return self._get_instance(PullRequest, pull_request)
65 65
66 66 def _extract_mentions(self, s):
67 67 user_objects = []
68 68 for username in extract_mentioned_users(s):
69 69 user_obj = User.get_by_username(username, case_insensitive=True)
70 70 if user_obj:
71 71 user_objects.append(user_obj)
72 72 return user_objects
73 73
74 74 def _get_renderer(self, global_renderer='rst'):
75 75 try:
76 76 # try reading from visual context
77 77 from pylons import tmpl_context
78 78 global_renderer = tmpl_context.visual.default_renderer
79 79 except AttributeError:
80 80 log.debug("Renderer not set, falling back "
81 81 "to default renderer '%s'", global_renderer)
82 82 except Exception:
83 83 log.error(traceback.format_exc())
84 84 return global_renderer
85 85
86 86 def aggregate_comments(self, comments, versions, show_version, inline=False):
87 87 # group by versions, and count until, and display objects
88 88
89 89 comment_groups = collections.defaultdict(list)
90 90 [comment_groups[
91 91 _co.pull_request_version_id].append(_co) for _co in comments]
92 92
93 93 def yield_comments(pos):
94 94 for co in comment_groups[pos]:
95 95 yield co
96 96
97 97 comment_versions = collections.defaultdict(
98 98 lambda: collections.defaultdict(list))
99 99 prev_prvid = -1
100 100 # fake last entry with None, to aggregate on "latest" version which
101 101         # doesn't have a pull_request_version_id
102 102 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
103 103 prvid = ver.pull_request_version_id
104 104 if prev_prvid == -1:
105 105 prev_prvid = prvid
106 106
107 107 for co in yield_comments(prvid):
108 108 comment_versions[prvid]['at'].append(co)
109 109
110 110 # save until
111 111 current = comment_versions[prvid]['at']
112 112 prev_until = comment_versions[prev_prvid]['until']
113 113 cur_until = prev_until + current
114 114 comment_versions[prvid]['until'].extend(cur_until)
115 115
116 116 # save outdated
117 117 if inline:
118 118 outdated = [x for x in cur_until
119 119 if x.outdated_at_version(show_version)]
120 120 else:
121 121 outdated = [x for x in cur_until
122 122 if x.older_than_version(show_version)]
123 123 display = [x for x in cur_until if x not in outdated]
124 124
125 125 comment_versions[prvid]['outdated'] = outdated
126 126 comment_versions[prvid]['display'] = display
127 127
128 128 prev_prvid = prvid
129 129
130 130 return comment_versions
131 131
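For clarity, a sketch of the shape returned by `aggregate_comments` above; the inputs and the shown version are illustrative.

# illustrative inputs; 'at'/'until'/'outdated'/'display' are the buckets
# filled per pull-request version id above
grouped = CommentsModel().aggregate_comments(
    comments, versions=pr_versions, show_version=2, inline=True)
latest_shown = grouped[None]['display']   # None aggregates the "latest" version
older_hidden = grouped[None]['outdated']
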
132 132 def get_unresolved_todos(self, pull_request, show_outdated=True):
133 133
134 134 todos = Session().query(ChangesetComment) \
135 135 .filter(ChangesetComment.pull_request == pull_request) \
136 136 .filter(ChangesetComment.resolved_by == None) \
137 137 .filter(ChangesetComment.comment_type
138 138 == ChangesetComment.COMMENT_TYPE_TODO)
139 139
140 140 if not show_outdated:
141 141 todos = todos.filter(
142 142 coalesce(ChangesetComment.display_state, '') !=
143 143 ChangesetComment.COMMENT_OUTDATED)
144 144
145 145 todos = todos.all()
146 146
147 147 return todos
148 148
149 149 def get_commit_unresolved_todos(self, commit_id, show_outdated=True):
150 150
151 151 todos = Session().query(ChangesetComment) \
152 152 .filter(ChangesetComment.revision == commit_id) \
153 153 .filter(ChangesetComment.resolved_by == None) \
154 154 .filter(ChangesetComment.comment_type
155 155 == ChangesetComment.COMMENT_TYPE_TODO)
156 156
157 157 if not show_outdated:
158 158 todos = todos.filter(
159 159 coalesce(ChangesetComment.display_state, '') !=
160 160 ChangesetComment.COMMENT_OUTDATED)
161 161
162 162 todos = todos.all()
163 163
164 164 return todos
165 165
166 def _log_audit_action(self, action, action_data, user, comment):
167 audit_logger.store(
168 action=action,
169 action_data=action_data,
170 user=user,
171 repo=comment.repo)
172
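For reference, this is the call pattern used further down in this changeset when a comment is created; the delete path uses the same helper with an 'old_data' payload.

# as used later in create(): record the serialized comment as audit data
comment_data = comment.get_api_data()
self._log_audit_action(
    'repo.pull_request.comment.create', {'data': comment_data}, user, comment)
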
166 173 def create(self, text, repo, user, commit_id=None, pull_request=None,
167 174 f_path=None, line_no=None, status_change=None,
168 175 status_change_type=None, comment_type=None,
169 176 resolves_comment_id=None, closing_pr=False, send_email=True,
170 177 renderer=None):
171 178 """
172 179         Creates a new comment for a commit or a pull request.
173 180         If status_change is not None, this comment is associated with a
174 181         status change of a commit or of the commit associated with the pull request.
175 182
176 183 :param text:
177 184 :param repo:
178 185 :param user:
179 186 :param commit_id:
180 187 :param pull_request:
181 188 :param f_path:
182 189 :param line_no:
183 190 :param status_change: Label for status change
184 191 :param comment_type: Type of comment
185 192 :param status_change_type: type of status change
186 193 :param closing_pr:
187 194 :param send_email:
188 195 :param renderer: pick renderer for this comment
189 196 """
190 197 if not text:
191 198 log.warning('Missing text for comment, skipping...')
192 199 return
193 200
194 201 if not renderer:
195 202 renderer = self._get_renderer()
196 203
197 204 repo = self._get_repo(repo)
198 205 user = self._get_user(user)
199 206
200 207 schema = comment_schema.CommentSchema()
201 208 validated_kwargs = schema.deserialize(dict(
202 209 comment_body=text,
203 210 comment_type=comment_type,
204 211 comment_file=f_path,
205 212 comment_line=line_no,
206 213 renderer_type=renderer,
207 214 status_change=status_change_type,
208 215 resolves_comment_id=resolves_comment_id,
209 216 repo=repo.repo_id,
210 217 user=user.user_id,
211 218 ))
212 219
213 220 comment = ChangesetComment()
214 221 comment.renderer = validated_kwargs['renderer_type']
215 222 comment.text = validated_kwargs['comment_body']
216 223 comment.f_path = validated_kwargs['comment_file']
217 224 comment.line_no = validated_kwargs['comment_line']
218 225 comment.comment_type = validated_kwargs['comment_type']
219 226
220 227 comment.repo = repo
221 228 comment.author = user
222 229 comment.resolved_comment = self.__get_commit_comment(
223 230 validated_kwargs['resolves_comment_id'])
224 231
225 232 pull_request_id = pull_request
226 233
227 234 commit_obj = None
228 235 pull_request_obj = None
229 236
230 237 if commit_id:
231 238 notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
232 239 # do a lookup, so we don't pass something bad here
233 240 commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
234 241 comment.revision = commit_obj.raw_id
235 242
236 243 elif pull_request_id:
237 244 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
238 245 pull_request_obj = self.__get_pull_request(pull_request_id)
239 246 comment.pull_request = pull_request_obj
240 247 else:
241 248 raise Exception('Please specify commit or pull_request_id')
242 249
243 250 Session().add(comment)
244 251 Session().flush()
245 252 kwargs = {
246 253 'user': user,
247 254 'renderer_type': renderer,
248 255 'repo_name': repo.repo_name,
249 256 'status_change': status_change,
250 257 'status_change_type': status_change_type,
251 258 'comment_body': text,
252 259 'comment_file': f_path,
253 260 'comment_line': line_no,
254 261 'comment_type': comment_type or 'note'
255 262 }
256 263
257 264 if commit_obj:
258 265 recipients = ChangesetComment.get_users(
259 266 revision=commit_obj.raw_id)
260 267 # add commit author if it's in RhodeCode system
261 268 cs_author = User.get_from_cs_author(commit_obj.author)
262 269 if not cs_author:
263 270 # use repo owner if we cannot extract the author correctly
264 271 cs_author = repo.user
265 272 recipients += [cs_author]
266 273
267 274 commit_comment_url = self.get_url(comment)
268 275
269 276 target_repo_url = h.link_to(
270 277 repo.repo_name,
271 278 h.route_url('repo_summary', repo_name=repo.repo_name))
272 279
273 280 # commit specifics
274 281 kwargs.update({
275 282 'commit': commit_obj,
276 283 'commit_message': commit_obj.message,
277 284 'commit_target_repo': target_repo_url,
278 285 'commit_comment_url': commit_comment_url,
279 286 })
280 287
281 288 elif pull_request_obj:
282 289 # get the current participants of this pull request
283 290 recipients = ChangesetComment.get_users(
284 291 pull_request_id=pull_request_obj.pull_request_id)
285 292 # add pull request author
286 293 recipients += [pull_request_obj.author]
287 294
288 295 # add the reviewers to notification
289 296 recipients += [x.user for x in pull_request_obj.reviewers]
290 297
291 298 pr_target_repo = pull_request_obj.target_repo
292 299 pr_source_repo = pull_request_obj.source_repo
293 300
294 301 pr_comment_url = h.url(
295 302 'pullrequest_show',
296 303 repo_name=pr_target_repo.repo_name,
297 304 pull_request_id=pull_request_obj.pull_request_id,
298 305 anchor='comment-%s' % comment.comment_id,
299 306 qualified=True,)
300 307
301 308 # set some variables for email notification
302 309 pr_target_repo_url = h.route_url(
303 310 'repo_summary', repo_name=pr_target_repo.repo_name)
304 311
305 312 pr_source_repo_url = h.route_url(
306 313 'repo_summary', repo_name=pr_source_repo.repo_name)
307 314
308 315 # pull request specifics
309 316 kwargs.update({
310 317 'pull_request': pull_request_obj,
311 318 'pr_id': pull_request_obj.pull_request_id,
312 319 'pr_target_repo': pr_target_repo,
313 320 'pr_target_repo_url': pr_target_repo_url,
314 321 'pr_source_repo': pr_source_repo,
315 322 'pr_source_repo_url': pr_source_repo_url,
316 323 'pr_comment_url': pr_comment_url,
317 324 'pr_closing': closing_pr,
318 325 })
319 326 if send_email:
320 327 # pre-generate the subject for notification itself
321 328 (subject,
322 329 _h, _e, # we don't care about those
323 330 body_plaintext) = EmailNotificationModel().render_email(
324 331 notification_type, **kwargs)
325 332
326 333 mention_recipients = set(
327 334 self._extract_mentions(text)).difference(recipients)
328 335
329 336 # create notification objects, and emails
330 337 NotificationModel().create(
331 338 created_by=user,
332 339 notification_subject=subject,
333 340 notification_body=body_plaintext,
334 341 notification_type=notification_type,
335 342 recipients=recipients,
336 343 mention_recipients=mention_recipients,
337 344 email_kwargs=kwargs,
338 345 )
339 346
340 action = (
341 'user_commented_pull_request:{}'.format(
342 comment.pull_request.pull_request_id)
343 if comment.pull_request
344 else 'user_commented_revision:{}'.format(comment.revision)
345 )
346 action_logger(user, action, comment.repo)
347 Session().flush()
348 if comment.pull_request:
349 action = 'repo.pull_request.comment.create'
350 else:
351 action = 'repo.commit.comment.create'
352
353 comment_data = comment.get_api_data()
354 self._log_audit_action(
355 action, {'data': comment_data}, user, comment)
347 356
348 357 registry = get_current_registry()
349 358 rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {})
350 359 channelstream_config = rhodecode_plugins.get('channelstream', {})
351 360 msg_url = ''
352 361 if commit_obj:
353 362 msg_url = commit_comment_url
354 363 repo_name = repo.repo_name
355 364 elif pull_request_obj:
356 365 msg_url = pr_comment_url
357 366 repo_name = pr_target_repo.repo_name
358 367
359 368 if channelstream_config.get('enabled'):
360 369 message = '<strong>{}</strong> {} - ' \
361 370 '<a onclick="window.location=\'{}\';' \
362 371 'window.location.reload()">' \
363 372 '<strong>{}</strong></a>'
364 373 message = message.format(
365 374 user.username, _('made a comment'), msg_url,
366 375 _('Show it now'))
367 376 channel = '/repo${}$/pr/{}'.format(
368 377 repo_name,
369 378 pull_request_id
370 379 )
371 380 payload = {
372 381 'type': 'message',
373 382 'timestamp': datetime.utcnow(),
374 383 'user': 'system',
375 384 'exclude_users': [user.username],
376 385 'channel': channel,
377 386 'message': {
378 387 'message': message,
379 388 'level': 'info',
380 389 'topic': '/notifications'
381 390 }
382 391 }
383 392 channelstream_request(channelstream_config, [payload],
384 393 '/message', raise_exc=False)
385 394
386 395 return comment
387 396
388 def delete(self, comment):
397 def delete(self, comment, user):
389 398 """
390 399 Deletes given comment
391
392 :param comment_id:
393 400 """
394 401 comment = self.__get_commit_comment(comment)
402 old_data = comment.get_api_data()
395 403 Session().delete(comment)
396 404
405 if comment.pull_request:
406 action = 'repo.pull_request.comment.delete'
407 else:
408 action = 'repo.commit.comment.delete'
409
410 self._log_audit_action(
411 action, {'old_data': old_data}, user, comment)
412
397 413 return comment
398 414
399 415 def get_all_comments(self, repo_id, revision=None, pull_request=None):
400 416 q = ChangesetComment.query()\
401 417 .filter(ChangesetComment.repo_id == repo_id)
402 418 if revision:
403 419 q = q.filter(ChangesetComment.revision == revision)
404 420 elif pull_request:
405 421 pull_request = self.__get_pull_request(pull_request)
406 422 q = q.filter(ChangesetComment.pull_request == pull_request)
407 423 else:
408 424 raise Exception('Please specify commit or pull_request')
409 425 q = q.order_by(ChangesetComment.created_on)
410 426 return q.all()
411 427
412 428 def get_url(self, comment, request=None, permalink=False):
413 429 if not request:
414 430 request = get_current_request()
415 431
416 432 comment = self.__get_commit_comment(comment)
417 433 if comment.pull_request:
418 434 pull_request = comment.pull_request
419 435 if permalink:
420 436 return request.route_url(
421 437 'pull_requests_global',
422 438 pull_request_id=pull_request.pull_request_id,
423 439 _anchor='comment-%s' % comment.comment_id)
424 440 else:
425 441 return request.route_url(
426 442 'pullrequest_show',
427 443 repo_name=safe_str(pull_request.target_repo.repo_name),
428 444 pull_request_id=pull_request.pull_request_id,
429 445 _anchor='comment-%s' % comment.comment_id)
430 446
431 447 else:
432 448 repo = comment.repo
433 449 commit_id = comment.revision
434 450
435 451 if permalink:
436 452 return request.route_url(
437 453 'repo_commit', repo_name=safe_str(repo.repo_id),
438 454 commit_id=commit_id,
439 455 _anchor='comment-%s' % comment.comment_id)
440 456
441 457 else:
442 458 return request.route_url(
443 459 'repo_commit', repo_name=safe_str(repo.repo_name),
444 460 commit_id=commit_id,
445 461 _anchor='comment-%s' % comment.comment_id)
446 462
447 463 def get_comments(self, repo_id, revision=None, pull_request=None):
448 464 """
449 465 Gets main comments based on revision or pull_request_id
450 466
451 467 :param repo_id:
452 468 :param revision:
453 469 :param pull_request:
454 470 """
455 471
456 472 q = ChangesetComment.query()\
457 473 .filter(ChangesetComment.repo_id == repo_id)\
458 474 .filter(ChangesetComment.line_no == None)\
459 475 .filter(ChangesetComment.f_path == None)
460 476 if revision:
461 477 q = q.filter(ChangesetComment.revision == revision)
462 478 elif pull_request:
463 479 pull_request = self.__get_pull_request(pull_request)
464 480 q = q.filter(ChangesetComment.pull_request == pull_request)
465 481 else:
466 482 raise Exception('Please specify commit or pull_request')
467 483 q = q.order_by(ChangesetComment.created_on)
468 484 return q.all()
469 485
470 486 def get_inline_comments(self, repo_id, revision=None, pull_request=None):
471 487 q = self._get_inline_comments_query(repo_id, revision, pull_request)
472 488 return self._group_comments_by_path_and_line_number(q)
473 489
474 490 def get_inline_comments_count(self, inline_comments, skip_outdated=True,
475 491 version=None):
476 492 inline_cnt = 0
477 493 for fname, per_line_comments in inline_comments.iteritems():
478 494 for lno, comments in per_line_comments.iteritems():
479 495 for comm in comments:
480 496 if not comm.outdated_at_version(version) and skip_outdated:
481 497 inline_cnt += 1
482 498
483 499 return inline_cnt
484 500
485 501 def get_outdated_comments(self, repo_id, pull_request):
486 502 # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
487 503 # of a pull request.
488 504 q = self._all_inline_comments_of_pull_request(pull_request)
489 505 q = q.filter(
490 506 ChangesetComment.display_state ==
491 507 ChangesetComment.COMMENT_OUTDATED
492 508 ).order_by(ChangesetComment.comment_id.asc())
493 509
494 510 return self._group_comments_by_path_and_line_number(q)
495 511
496 512 def _get_inline_comments_query(self, repo_id, revision, pull_request):
497 513 # TODO: johbo: Split this into two methods: One for PR and one for
498 514 # commit.
499 515 if revision:
500 516 q = Session().query(ChangesetComment).filter(
501 517 ChangesetComment.repo_id == repo_id,
502 518 ChangesetComment.line_no != null(),
503 519 ChangesetComment.f_path != null(),
504 520 ChangesetComment.revision == revision)
505 521
506 522 elif pull_request:
507 523 pull_request = self.__get_pull_request(pull_request)
508 524 if not CommentsModel.use_outdated_comments(pull_request):
509 525 q = self._visible_inline_comments_of_pull_request(pull_request)
510 526 else:
511 527 q = self._all_inline_comments_of_pull_request(pull_request)
512 528
513 529 else:
514 530 raise Exception('Please specify commit or pull_request_id')
515 531 q = q.order_by(ChangesetComment.comment_id.asc())
516 532 return q
517 533
518 534 def _group_comments_by_path_and_line_number(self, q):
519 535 comments = q.all()
520 536 paths = collections.defaultdict(lambda: collections.defaultdict(list))
521 537 for co in comments:
522 538 paths[co.f_path][co.line_no].append(co)
523 539 return paths
524 540
525 541 @classmethod
526 542 def needed_extra_diff_context(cls):
527 543 return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)
528 544
529 545 def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
530 546 if not CommentsModel.use_outdated_comments(pull_request):
531 547 return
532 548
533 549 comments = self._visible_inline_comments_of_pull_request(pull_request)
534 550 comments_to_outdate = comments.all()
535 551
536 552 for comment in comments_to_outdate:
537 553 self._outdate_one_comment(comment, old_diff_data, new_diff_data)
538 554
539 555 def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
540 556 diff_line = _parse_comment_line_number(comment.line_no)
541 557
542 558 try:
543 559 old_context = old_diff_proc.get_context_of_line(
544 560 path=comment.f_path, diff_line=diff_line)
545 561 new_context = new_diff_proc.get_context_of_line(
546 562 path=comment.f_path, diff_line=diff_line)
547 563 except (diffs.LineNotInDiffException,
548 564 diffs.FileNotInDiffException):
549 565 comment.display_state = ChangesetComment.COMMENT_OUTDATED
550 566 return
551 567
552 568 if old_context == new_context:
553 569 return
554 570
555 571 if self._should_relocate_diff_line(diff_line):
556 572 new_diff_lines = new_diff_proc.find_context(
557 573 path=comment.f_path, context=old_context,
558 574 offset=self.DIFF_CONTEXT_BEFORE)
559 575 if not new_diff_lines:
560 576 comment.display_state = ChangesetComment.COMMENT_OUTDATED
561 577 else:
562 578 new_diff_line = self._choose_closest_diff_line(
563 579 diff_line, new_diff_lines)
564 580 comment.line_no = _diff_to_comment_line_number(new_diff_line)
565 581 else:
566 582 comment.display_state = ChangesetComment.COMMENT_OUTDATED
567 583
568 584 def _should_relocate_diff_line(self, diff_line):
569 585 """
570 586 Checks if relocation shall be tried for the given `diff_line`.
571 587
572 588 If a comment points into the first lines, then we can have a situation
573 589 that after an update another line has been added on top. In this case
574 590 we would find the context still and move the comment around. This
575 591 would be wrong.
576 592 """
577 593 should_relocate = (
578 594 (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
579 595 (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
580 596 return should_relocate
581 597
582 598 def _choose_closest_diff_line(self, diff_line, new_diff_lines):
583 599 candidate = new_diff_lines[0]
584 600 best_delta = _diff_line_delta(diff_line, candidate)
585 601 for new_diff_line in new_diff_lines[1:]:
586 602 delta = _diff_line_delta(diff_line, new_diff_line)
587 603 if delta < best_delta:
588 604 candidate = new_diff_line
589 605 best_delta = delta
590 606 return candidate
591 607
592 608 def _visible_inline_comments_of_pull_request(self, pull_request):
593 609 comments = self._all_inline_comments_of_pull_request(pull_request)
594 610 comments = comments.filter(
595 611 coalesce(ChangesetComment.display_state, '') !=
596 612 ChangesetComment.COMMENT_OUTDATED)
597 613 return comments
598 614
599 615 def _all_inline_comments_of_pull_request(self, pull_request):
600 616 comments = Session().query(ChangesetComment)\
601 617 .filter(ChangesetComment.line_no != None)\
602 618 .filter(ChangesetComment.f_path != None)\
603 619 .filter(ChangesetComment.pull_request == pull_request)
604 620 return comments
605 621
606 622 def _all_general_comments_of_pull_request(self, pull_request):
607 623 comments = Session().query(ChangesetComment)\
608 624 .filter(ChangesetComment.line_no == None)\
609 625 .filter(ChangesetComment.f_path == None)\
610 626 .filter(ChangesetComment.pull_request == pull_request)
611 627 return comments
612 628
613 629 @staticmethod
614 630 def use_outdated_comments(pull_request):
615 631 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
616 632 settings = settings_model.get_general_settings()
617 633 return settings.get('rhodecode_use_outdated_comments', False)
618 634
619 635
620 636 def _parse_comment_line_number(line_no):
621 637 """
622 638 Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
623 639 """
624 640 old_line = None
625 641 new_line = None
626 642 if line_no.startswith('o'):
627 643 old_line = int(line_no[1:])
628 644 elif line_no.startswith('n'):
629 645 new_line = int(line_no[1:])
630 646 else:
631 647 raise ValueError("Comment lines have to start with either 'o' or 'n'.")
632 648 return diffs.DiffLineNumber(old_line, new_line)
633 649
634 650
635 651 def _diff_to_comment_line_number(diff_line):
636 652 if diff_line.new is not None:
637 653 return u'n{}'.format(diff_line.new)
638 654 elif diff_line.old is not None:
639 655 return u'o{}'.format(diff_line.old)
640 656 return u''
641 657
642 658
643 659 def _diff_line_delta(a, b):
644 660 if None not in (a.new, b.new):
645 661 return abs(a.new - b.new)
646 662 elif None not in (a.old, b.old):
647 663 return abs(a.old - b.old)
648 664 else:
649 665 raise ValueError(
650 666 "Cannot compute delta between {} and {}".format(a, b))
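A small worked example of the module-level helpers above; the line numbers are illustrative.

# 'o' marks an old-side line number, 'n' a new-side one
old = _parse_comment_line_number('o12')    # DiffLineNumber(old=12, new=None)
new = _parse_comment_line_number('n15')    # DiffLineNumber(old=None, new=15)

_diff_to_comment_line_number(new)          # u'n15'

# the delta is computed on whichever side both lines share
_diff_line_delta(_parse_comment_line_number('n10'), new)   # abs(10 - 15) == 5
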
@@ -1,4031 +1,4065 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37
38 38 from sqlalchemy import *
39 39 from sqlalchemy.ext.declarative import declared_attr
40 40 from sqlalchemy.ext.hybrid import hybrid_property
41 41 from sqlalchemy.orm import (
42 42 relationship, joinedload, class_mapper, validates, aliased)
43 43 from sqlalchemy.sql.expression import true
44 44 from beaker.cache import cache_region
45 45 from zope.cachedescriptors.property import Lazy as LazyProperty
46 46
47 47 from pylons.i18n.translation import lazy_ugettext as _
48 48 from pyramid.threadlocal import get_current_request
49 49
50 50 from rhodecode.lib.vcs import get_vcs_instance
51 51 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
52 52 from rhodecode.lib.utils2 import (
53 53 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
54 54 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
55 55 glob2re, StrictAttributeDict, cleaned_uri)
56 56 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType
57 57 from rhodecode.lib.ext_json import json
58 58 from rhodecode.lib.caching_query import FromCache
59 59 from rhodecode.lib.encrypt import AESCipher
60 60
61 61 from rhodecode.model.meta import Base, Session
62 62
63 63 URL_SEP = '/'
64 64 log = logging.getLogger(__name__)
65 65
66 66 # =============================================================================
67 67 # BASE CLASSES
68 68 # =============================================================================
69 69
70 70 # this is propagated from .ini file rhodecode.encrypted_values.secret or
71 71 # beaker.session.secret if first is not set.
72 72 # and initialized at environment.py
73 73 ENCRYPTION_KEY = None
74 74
75 75 # used to sort permissions by type; '#' is used here because it is not allowed
76 76 # in usernames and sorts very early in the string.printable table.
77 77 PERMISSION_TYPE_SORT = {
78 78 'admin': '####',
79 79 'write': '###',
80 80 'read': '##',
81 81 'none': '#',
82 82 }
83 83
84 84
85 85 def display_sort(obj):
86 86 """
87 87 Sort function used to sort permissions in .permissions() function of
88 88     Repository, RepoGroup, UserGroup. It also puts the default user in front
89 89     of all other resources.
90 90 """
91 91
92 92 if obj.username == User.DEFAULT_USER:
93 93 return '#####'
94 94 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
95 95 return prefix + obj.username
96 96
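A quick illustration of the ordering produced by `display_sort` above; the usernames and permissions are made up.

# made-up permission entries, just to show the sort keys generated above
entries = [AttributeDict(username='bob', permission='repository.read'),
           AttributeDict(username='default', permission='repository.none'),
           AttributeDict(username='alice', permission='repository.admin')]
sorted(entries, key=display_sort)
# keys: '#####' (default user), '####alice' (admin), '##bob' (read)
# -> default first, then alice, then bob
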
97 97
98 98 def _hash_key(k):
99 99 return md5_safe(k)
100 100
101 101
102 102 class EncryptedTextValue(TypeDecorator):
103 103 """
104 104 Special column for encrypted long text data, use like::
105 105
106 106         value = Column("encrypted_value", EncryptedTextValue(), nullable=False)
107 107
108 108     This column is intelligent: if the value is in unencrypted form it returns
109 109     the unencrypted form, but on save it always encrypts.
110 110 """
111 111 impl = Text
112 112
113 113 def process_bind_param(self, value, dialect):
114 114 if not value:
115 115 return value
116 116 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
117 117             # protect against double encryption if someone manually starts
118 118             # doing it
119 119 raise ValueError('value needs to be in unencrypted format, ie. '
120 120 'not starting with enc$aes')
121 121 return 'enc$aes_hmac$%s' % AESCipher(
122 122 ENCRYPTION_KEY, hmac=True).encrypt(value)
123 123
124 124 def process_result_value(self, value, dialect):
125 125 import rhodecode
126 126
127 127 if not value:
128 128 return value
129 129
130 130 parts = value.split('$', 3)
131 131 if not len(parts) == 3:
132 132 # probably not encrypted values
133 133 return value
134 134 else:
135 135 if parts[0] != 'enc':
136 136 # parts ok but without our header ?
137 137 return value
138 138 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
139 139 'rhodecode.encrypted_values.strict') or True)
140 140 # at that stage we know it's our encryption
141 141 if parts[1] == 'aes':
142 142 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
143 143 elif parts[1] == 'aes_hmac':
144 144 decrypted_data = AESCipher(
145 145 ENCRYPTION_KEY, hmac=True,
146 146 strict_verification=enc_strict_mode).decrypt(parts[2])
147 147 else:
148 148 raise ValueError(
149 149 'Encryption type part is wrong, must be `aes` '
150 150 'or `aes_hmac`, got `%s` instead' % (parts[1]))
151 151 return decrypted_data
152 152
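A rough round-trip sketch for the column type above; it assumes ENCRYPTION_KEY has already been initialized from the .ini file, and the plaintext value is made up.

# sketch only: requires ENCRYPTION_KEY to be set up by environment.py
col = EncryptedTextValue()
stored = col.process_bind_param(u'secret-token', None)   # -> 'enc$aes_hmac$...'
assert stored.startswith('enc$aes_hmac$')
assert col.process_result_value(stored, None) == 'secret-token'
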
153 153
154 154 class BaseModel(object):
155 155 """
156 156 Base Model for all classes
157 157 """
158 158
159 159 @classmethod
160 160 def _get_keys(cls):
161 161 """return column names for this model """
162 162 return class_mapper(cls).c.keys()
163 163
164 164 def get_dict(self):
165 165 """
166 166 return dict with keys and values corresponding
167 167 to this model data """
168 168
169 169 d = {}
170 170 for k in self._get_keys():
171 171 d[k] = getattr(self, k)
172 172
173 173 # also use __json__() if present to get additional fields
174 174 _json_attr = getattr(self, '__json__', None)
175 175 if _json_attr:
176 176 # update with attributes from __json__
177 177 if callable(_json_attr):
178 178 _json_attr = _json_attr()
179 179 for k, val in _json_attr.iteritems():
180 180 d[k] = val
181 181 return d
182 182
183 183 def get_appstruct(self):
184 184         """return a list of (key, value) tuples corresponding
185 185         to this model's data """
186 186
187 187 l = []
188 188 for k in self._get_keys():
189 189 l.append((k, getattr(self, k),))
190 190 return l
191 191
192 192 def populate_obj(self, populate_dict):
193 193 """populate model with data from given populate_dict"""
194 194
195 195 for k in self._get_keys():
196 196 if k in populate_dict:
197 197 setattr(self, k, populate_dict[k])
198 198
199 199 @classmethod
200 200 def query(cls):
201 201 return Session().query(cls)
202 202
203 203 @classmethod
204 204 def get(cls, id_):
205 205 if id_:
206 206 return cls.query().get(id_)
207 207
208 208 @classmethod
209 209 def get_or_404(cls, id_, pyramid_exc=False):
210 210 if pyramid_exc:
211 211 # NOTE(marcink): backward compat, once migration to pyramid
212 212 # this should only use pyramid exceptions
213 213 from pyramid.httpexceptions import HTTPNotFound
214 214 else:
215 215 from webob.exc import HTTPNotFound
216 216
217 217 try:
218 218 id_ = int(id_)
219 219 except (TypeError, ValueError):
220 220 raise HTTPNotFound
221 221
222 222 res = cls.query().get(id_)
223 223 if not res:
224 224 raise HTTPNotFound
225 225 return res
226 226
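A brief usage sketch for `get_or_404` above; `user_id` stands in for a value taken from the request.

# illustrative: a missing or non-integer id raises HTTPNotFound
user = User.get_or_404(user_id)                     # webob exception (pylons views)
user = User.get_or_404(user_id, pyramid_exc=True)   # pyramid exception (pyramid views)
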
227 227 @classmethod
228 228 def getAll(cls):
229 229 # deprecated and left for backward compatibility
230 230 return cls.get_all()
231 231
232 232 @classmethod
233 233 def get_all(cls):
234 234 return cls.query().all()
235 235
236 236 @classmethod
237 237 def delete(cls, id_):
238 238 obj = cls.query().get(id_)
239 239 Session().delete(obj)
240 240
241 241 @classmethod
242 242 def identity_cache(cls, session, attr_name, value):
243 243 exist_in_session = []
244 244 for (item_cls, pkey), instance in session.identity_map.items():
245 245 if cls == item_cls and getattr(instance, attr_name) == value:
246 246 exist_in_session.append(instance)
247 247 if exist_in_session:
248 248 if len(exist_in_session) == 1:
249 249 return exist_in_session[0]
250 250 log.exception(
251 251 'multiple objects with attr %s and '
252 252 'value %s found with same name: %r',
253 253 attr_name, value, exist_in_session)
254 254
255 255 def __repr__(self):
256 256 if hasattr(self, '__unicode__'):
257 257 # python repr needs to return str
258 258 try:
259 259 return safe_str(self.__unicode__())
260 260 except UnicodeDecodeError:
261 261 pass
262 262 return '<DB:%s>' % (self.__class__.__name__)
263 263
264 264
265 265 class RhodeCodeSetting(Base, BaseModel):
266 266 __tablename__ = 'rhodecode_settings'
267 267 __table_args__ = (
268 268 UniqueConstraint('app_settings_name'),
269 269 {'extend_existing': True, 'mysql_engine': 'InnoDB',
270 270 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
271 271 )
272 272
273 273 SETTINGS_TYPES = {
274 274 'str': safe_str,
275 275 'int': safe_int,
276 276 'unicode': safe_unicode,
277 277 'bool': str2bool,
278 278 'list': functools.partial(aslist, sep=',')
279 279 }
280 280 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
281 281 GLOBAL_CONF_KEY = 'app_settings'
282 282
283 283 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
284 284 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
285 285 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
286 286 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
287 287
288 288 def __init__(self, key='', val='', type='unicode'):
289 289 self.app_settings_name = key
290 290 self.app_settings_type = type
291 291 self.app_settings_value = val
292 292
293 293 @validates('_app_settings_value')
294 294 def validate_settings_value(self, key, val):
295 295 assert type(val) == unicode
296 296 return val
297 297
298 298 @hybrid_property
299 299 def app_settings_value(self):
300 300 v = self._app_settings_value
301 301 _type = self.app_settings_type
302 302 if _type:
303 303 _type = self.app_settings_type.split('.')[0]
304 304 # decode the encrypted value
305 305 if 'encrypted' in self.app_settings_type:
306 306 cipher = EncryptedTextValue()
307 307 v = safe_unicode(cipher.process_result_value(v, None))
308 308
309 309 converter = self.SETTINGS_TYPES.get(_type) or \
310 310 self.SETTINGS_TYPES['unicode']
311 311 return converter(v)
312 312
313 313 @app_settings_value.setter
314 314 def app_settings_value(self, val):
315 315 """
316 316 Setter that will always make sure we use unicode in app_settings_value
317 317
318 318 :param val:
319 319 """
320 320 val = safe_unicode(val)
321 321 # encode the encrypted value
322 322 if 'encrypted' in self.app_settings_type:
323 323 cipher = EncryptedTextValue()
324 324 val = safe_unicode(cipher.process_bind_param(val, None))
325 325 self._app_settings_value = val
326 326
327 327 @hybrid_property
328 328 def app_settings_type(self):
329 329 return self._app_settings_type
330 330
331 331 @app_settings_type.setter
332 332 def app_settings_type(self, val):
333 333 if val.split('.')[0] not in self.SETTINGS_TYPES:
334 334 raise Exception('type must be one of %s got %s'
335 335 % (self.SETTINGS_TYPES.keys(), val))
336 336 self._app_settings_type = val
337 337
338 338 def __unicode__(self):
339 339 return u"<%s('%s:%s[%s]')>" % (
340 340 self.__class__.__name__,
341 341 self.app_settings_name, self.app_settings_value,
342 342 self.app_settings_type
343 343 )
344 344
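A small sketch of the type conversion performed by `app_settings_value` above; the setting names and values are made up.

# made-up settings: values are stored as text and converted on access
flag = RhodeCodeSetting('hypothetical_flag', 'True', type='bool')
assert flag.app_settings_value is True                 # via str2bool

langs = RhodeCodeSetting('hypothetical_list', 'a,b,c', type='list')
assert langs.app_settings_value == ['a', 'b', 'c']     # via aslist(sep=',')
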
345 345
346 346 class RhodeCodeUi(Base, BaseModel):
347 347 __tablename__ = 'rhodecode_ui'
348 348 __table_args__ = (
349 349 UniqueConstraint('ui_key'),
350 350 {'extend_existing': True, 'mysql_engine': 'InnoDB',
351 351 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
352 352 )
353 353
354 354 HOOK_REPO_SIZE = 'changegroup.repo_size'
355 355 # HG
356 356 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
357 357 HOOK_PULL = 'outgoing.pull_logger'
358 358 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
359 359 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
360 360 HOOK_PUSH = 'changegroup.push_logger'
361 361 HOOK_PUSH_KEY = 'pushkey.key_push'
362 362
363 363 # TODO: johbo: Unify way how hooks are configured for git and hg,
364 364 # git part is currently hardcoded.
365 365
366 366 # SVN PATTERNS
367 367 SVN_BRANCH_ID = 'vcs_svn_branch'
368 368 SVN_TAG_ID = 'vcs_svn_tag'
369 369
370 370 ui_id = Column(
371 371 "ui_id", Integer(), nullable=False, unique=True, default=None,
372 372 primary_key=True)
373 373 ui_section = Column(
374 374 "ui_section", String(255), nullable=True, unique=None, default=None)
375 375 ui_key = Column(
376 376 "ui_key", String(255), nullable=True, unique=None, default=None)
377 377 ui_value = Column(
378 378 "ui_value", String(255), nullable=True, unique=None, default=None)
379 379 ui_active = Column(
380 380 "ui_active", Boolean(), nullable=True, unique=None, default=True)
381 381
382 382 def __repr__(self):
383 383 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
384 384 self.ui_key, self.ui_value)
385 385
386 386
387 387 class RepoRhodeCodeSetting(Base, BaseModel):
388 388 __tablename__ = 'repo_rhodecode_settings'
389 389 __table_args__ = (
390 390 UniqueConstraint(
391 391 'app_settings_name', 'repository_id',
392 392 name='uq_repo_rhodecode_setting_name_repo_id'),
393 393 {'extend_existing': True, 'mysql_engine': 'InnoDB',
394 394 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
395 395 )
396 396
397 397 repository_id = Column(
398 398 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
399 399 nullable=False)
400 400 app_settings_id = Column(
401 401 "app_settings_id", Integer(), nullable=False, unique=True,
402 402 default=None, primary_key=True)
403 403 app_settings_name = Column(
404 404 "app_settings_name", String(255), nullable=True, unique=None,
405 405 default=None)
406 406 _app_settings_value = Column(
407 407 "app_settings_value", String(4096), nullable=True, unique=None,
408 408 default=None)
409 409 _app_settings_type = Column(
410 410 "app_settings_type", String(255), nullable=True, unique=None,
411 411 default=None)
412 412
413 413 repository = relationship('Repository')
414 414
415 415 def __init__(self, repository_id, key='', val='', type='unicode'):
416 416 self.repository_id = repository_id
417 417 self.app_settings_name = key
418 418 self.app_settings_type = type
419 419 self.app_settings_value = val
420 420
421 421 @validates('_app_settings_value')
422 422 def validate_settings_value(self, key, val):
423 423 assert type(val) == unicode
424 424 return val
425 425
426 426 @hybrid_property
427 427 def app_settings_value(self):
428 428 v = self._app_settings_value
429 429 type_ = self.app_settings_type
430 430 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
431 431 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
432 432 return converter(v)
433 433
434 434 @app_settings_value.setter
435 435 def app_settings_value(self, val):
436 436 """
437 437 Setter that will always make sure we use unicode in app_settings_value
438 438
439 439 :param val:
440 440 """
441 441 self._app_settings_value = safe_unicode(val)
442 442
443 443 @hybrid_property
444 444 def app_settings_type(self):
445 445 return self._app_settings_type
446 446
447 447 @app_settings_type.setter
448 448 def app_settings_type(self, val):
449 449 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
450 450 if val not in SETTINGS_TYPES:
451 451 raise Exception('type must be one of %s got %s'
452 452 % (SETTINGS_TYPES.keys(), val))
453 453 self._app_settings_type = val
454 454
455 455 def __unicode__(self):
456 456 return u"<%s('%s:%s:%s[%s]')>" % (
457 457 self.__class__.__name__, self.repository.repo_name,
458 458 self.app_settings_name, self.app_settings_value,
459 459 self.app_settings_type
460 460 )
461 461
462 462
463 463 class RepoRhodeCodeUi(Base, BaseModel):
464 464 __tablename__ = 'repo_rhodecode_ui'
465 465 __table_args__ = (
466 466 UniqueConstraint(
467 467 'repository_id', 'ui_section', 'ui_key',
468 468 name='uq_repo_rhodecode_ui_repository_id_section_key'),
469 469 {'extend_existing': True, 'mysql_engine': 'InnoDB',
470 470 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
471 471 )
472 472
473 473 repository_id = Column(
474 474 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
475 475 nullable=False)
476 476 ui_id = Column(
477 477 "ui_id", Integer(), nullable=False, unique=True, default=None,
478 478 primary_key=True)
479 479 ui_section = Column(
480 480 "ui_section", String(255), nullable=True, unique=None, default=None)
481 481 ui_key = Column(
482 482 "ui_key", String(255), nullable=True, unique=None, default=None)
483 483 ui_value = Column(
484 484 "ui_value", String(255), nullable=True, unique=None, default=None)
485 485 ui_active = Column(
486 486 "ui_active", Boolean(), nullable=True, unique=None, default=True)
487 487
488 488 repository = relationship('Repository')
489 489
490 490 def __repr__(self):
491 491 return '<%s[%s:%s]%s=>%s]>' % (
492 492 self.__class__.__name__, self.repository.repo_name,
493 493 self.ui_section, self.ui_key, self.ui_value)
494 494
495 495
496 496 class User(Base, BaseModel):
497 497 __tablename__ = 'users'
498 498 __table_args__ = (
499 499 UniqueConstraint('username'), UniqueConstraint('email'),
500 500 Index('u_username_idx', 'username'),
501 501 Index('u_email_idx', 'email'),
502 502 {'extend_existing': True, 'mysql_engine': 'InnoDB',
503 503 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
504 504 )
505 505 DEFAULT_USER = 'default'
506 506 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
507 507 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
508 508
509 509 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
510 510 username = Column("username", String(255), nullable=True, unique=None, default=None)
511 511 password = Column("password", String(255), nullable=True, unique=None, default=None)
512 512 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
513 513 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
514 514 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
515 515 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
516 516 _email = Column("email", String(255), nullable=True, unique=None, default=None)
517 517 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
518 518 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
519 519
520 520 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
521 521 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
522 522 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
523 523 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
524 524 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
525 525 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
526 526
527 527 user_log = relationship('UserLog')
528 528 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
529 529
530 530 repositories = relationship('Repository')
531 531 repository_groups = relationship('RepoGroup')
532 532 user_groups = relationship('UserGroup')
533 533
534 534 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
535 535 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
536 536
537 537 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
538 538 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
539 539 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
540 540
541 541 group_member = relationship('UserGroupMember', cascade='all')
542 542
543 543 notifications = relationship('UserNotification', cascade='all')
544 544 # notifications assigned to this user
545 545 user_created_notifications = relationship('Notification', cascade='all')
546 546 # comments created by this user
547 547 user_comments = relationship('ChangesetComment', cascade='all')
548 548 # user profile extra info
549 549 user_emails = relationship('UserEmailMap', cascade='all')
550 550 user_ip_map = relationship('UserIpMap', cascade='all')
551 551 user_auth_tokens = relationship('UserApiKeys', cascade='all')
552 552 # gists
553 553 user_gists = relationship('Gist', cascade='all')
554 554 # user pull requests
555 555 user_pull_requests = relationship('PullRequest', cascade='all')
556 556 # external identities
557 557 extenal_identities = relationship(
558 558 'ExternalIdentity',
559 559 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
560 560 cascade='all')
561 561
562 562 def __unicode__(self):
563 563 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
564 564 self.user_id, self.username)
565 565
566 566 @hybrid_property
567 567 def email(self):
568 568 return self._email
569 569
570 570 @email.setter
571 571 def email(self, val):
572 572 self._email = val.lower() if val else None
573 573
574 574 @hybrid_property
575 575 def api_key(self):
576 576 """
577 577         Fetch an auth-token with role ALL connected to this user, if one exists
578 578 """
579 579 user_auth_token = UserApiKeys.query()\
580 580 .filter(UserApiKeys.user_id == self.user_id)\
581 581 .filter(or_(UserApiKeys.expires == -1,
582 582 UserApiKeys.expires >= time.time()))\
583 583 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
584 584 if user_auth_token:
585 585 user_auth_token = user_auth_token.api_key
586 586
587 587 return user_auth_token
588 588
589 589 @api_key.setter
590 590 def api_key(self, val):
591 591         # don't allow setting the API key; this is deprecated for now
592 592 self._api_key = None
593 593
594 594 @property
595 595 def firstname(self):
596 596 # alias for future
597 597 return self.name
598 598
599 599 @property
600 600 def emails(self):
601 601 other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
602 602 return [self.email] + [x.email for x in other]
603 603
604 604 @property
605 605 def auth_tokens(self):
606 606 return [x.api_key for x in self.extra_auth_tokens]
607 607
608 608 @property
609 609 def extra_auth_tokens(self):
610 610 return UserApiKeys.query().filter(UserApiKeys.user == self).all()
611 611
612 612 @property
613 613 def feed_token(self):
614 614 return self.get_feed_token()
615 615
616 616 def get_feed_token(self):
617 617 feed_tokens = UserApiKeys.query()\
618 618 .filter(UserApiKeys.user == self)\
619 619 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
620 620 .all()
621 621 if feed_tokens:
622 622 return feed_tokens[0].api_key
623 623 return 'NO_FEED_TOKEN_AVAILABLE'
624 624
625 625 @classmethod
626 626 def extra_valid_auth_tokens(cls, user, role=None):
627 627 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
628 628 .filter(or_(UserApiKeys.expires == -1,
629 629 UserApiKeys.expires >= time.time()))
630 630 if role:
631 631 tokens = tokens.filter(or_(UserApiKeys.role == role,
632 632 UserApiKeys.role == UserApiKeys.ROLE_ALL))
633 633 return tokens.all()
634 634
635 635 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
636 636 from rhodecode.lib import auth
637 637
638 638 log.debug('Trying to authenticate user: %s via auth-token, '
639 639 'and roles: %s', self, roles)
640 640
641 641 if not auth_token:
642 642 return False
643 643
644 644 crypto_backend = auth.crypto_backend()
645 645
646 646 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
647 647 tokens_q = UserApiKeys.query()\
648 648 .filter(UserApiKeys.user_id == self.user_id)\
649 649 .filter(or_(UserApiKeys.expires == -1,
650 650 UserApiKeys.expires >= time.time()))
651 651
652 652 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
653 653
654 654 plain_tokens = []
655 655 hash_tokens = []
656 656
657 657 for token in tokens_q.all():
658 658 # verify scope first
659 659 if token.repo_id:
660 660 # token has a scope, we need to verify it
661 661 if scope_repo_id != token.repo_id:
662 662 log.debug(
663 663 'Scope mismatch: token has a set repo scope: %s, '
664 664 'and calling scope is:%s, skipping further checks',
665 665 token.repo, scope_repo_id)
666 666 # token has a scope, and it doesn't match, skip token
667 667 continue
668 668
669 669 if token.api_key.startswith(crypto_backend.ENC_PREF):
670 670 hash_tokens.append(token.api_key)
671 671 else:
672 672 plain_tokens.append(token.api_key)
673 673
674 674 is_plain_match = auth_token in plain_tokens
675 675 if is_plain_match:
676 676 return True
677 677
678 678 for hashed in hash_tokens:
679 679 # TODO(marcink): this is expensive to calculate, but most secure
680 680 match = crypto_backend.hash_check(auth_token, hashed)
681 681 if match:
682 682 return True
683 683
684 684 return False
685 685
686 686 @property
687 687 def ip_addresses(self):
688 688 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
689 689 return [x.ip_addr for x in ret]
690 690
691 691 @property
692 692 def username_and_name(self):
693 693 return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
694 694
695 695 @property
696 696 def username_or_name_or_email(self):
697 697         full_name = self.full_name if self.full_name != ' ' else None
698 698 return self.username or full_name or self.email
699 699
700 700 @property
701 701 def full_name(self):
702 702 return '%s %s' % (self.firstname, self.lastname)
703 703
704 704 @property
705 705 def full_name_or_username(self):
706 706 return ('%s %s' % (self.firstname, self.lastname)
707 707 if (self.firstname and self.lastname) else self.username)
708 708
709 709 @property
710 710 def full_contact(self):
711 711 return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
712 712
713 713 @property
714 714 def short_contact(self):
715 715 return '%s %s' % (self.firstname, self.lastname)
716 716
717 717 @property
718 718 def is_admin(self):
719 719 return self.admin
720 720
721 721 @property
722 722 def AuthUser(self):
723 723 """
724 724 Returns instance of AuthUser for this user
725 725 """
726 726 from rhodecode.lib.auth import AuthUser
727 727 return AuthUser(user_id=self.user_id, username=self.username)
728 728
729 729 @hybrid_property
730 730 def user_data(self):
731 731 if not self._user_data:
732 732 return {}
733 733
734 734 try:
735 735 return json.loads(self._user_data)
736 736 except TypeError:
737 737 return {}
738 738
739 739 @user_data.setter
740 740 def user_data(self, val):
741 741 if not isinstance(val, dict):
742 742 raise Exception('user_data must be dict, got %s' % type(val))
743 743 try:
744 744 self._user_data = json.dumps(val)
745 745 except Exception:
746 746 log.error(traceback.format_exc())
747 747
748 748 @classmethod
749 749 def get_by_username(cls, username, case_insensitive=False,
750 750 cache=False, identity_cache=False):
751 751 session = Session()
752 752
753 753 if case_insensitive:
754 754 q = cls.query().filter(
755 755 func.lower(cls.username) == func.lower(username))
756 756 else:
757 757 q = cls.query().filter(cls.username == username)
758 758
759 759 if cache:
760 760 if identity_cache:
761 761 val = cls.identity_cache(session, 'username', username)
762 762 if val:
763 763 return val
764 764 else:
765 765 cache_key = "get_user_by_name_%s" % _hash_key(username)
766 766 q = q.options(
767 767 FromCache("sql_cache_short", cache_key))
768 768
769 769 return q.scalar()
770 770
771 771 @classmethod
772 772 def get_by_auth_token(cls, auth_token, cache=False):
773 773 q = UserApiKeys.query()\
774 774 .filter(UserApiKeys.api_key == auth_token)\
775 775 .filter(or_(UserApiKeys.expires == -1,
776 776 UserApiKeys.expires >= time.time()))
777 777 if cache:
778 778 q = q.options(
779 779 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
780 780
781 781 match = q.first()
782 782 if match:
783 783 return match.user
784 784
785 785 @classmethod
786 786 def get_by_email(cls, email, case_insensitive=False, cache=False):
787 787
788 788 if case_insensitive:
789 789 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
790 790
791 791 else:
792 792 q = cls.query().filter(cls.email == email)
793 793
794 794 email_key = _hash_key(email)
795 795 if cache:
796 796 q = q.options(
797 797 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
798 798
799 799 ret = q.scalar()
800 800 if ret is None:
801 801 q = UserEmailMap.query()
802 802 # try fetching in alternate email map
803 803 if case_insensitive:
804 804 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
805 805 else:
806 806 q = q.filter(UserEmailMap.email == email)
807 807 q = q.options(joinedload(UserEmailMap.user))
808 808 if cache:
809 809 q = q.options(
810 810 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
811 811 ret = getattr(q.scalar(), 'user', None)
812 812
813 813 return ret
814 814
815 815 @classmethod
816 816 def get_from_cs_author(cls, author):
817 817 """
818 818 Tries to get User objects out of commit author string
819 819
820 820 :param author:
821 821 """
822 822 from rhodecode.lib.helpers import email, author_name
823 823 # if the author string contains a valid email, see if it's in the system
824 824 _email = email(author)
825 825 if _email:
826 826 user = cls.get_by_email(_email, case_insensitive=True)
827 827 if user:
828 828 return user
829 829 # Maybe we can match by username?
830 830 _author = author_name(author)
831 831 user = cls.get_by_username(_author, case_insensitive=True)
832 832 if user:
833 833 return user
834 834
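A hypothetical usage sketch of the lookup above, assuming an initialized RhodeCode database session; the author string follows the usual `Name <email>` VCS convention:

# Hypothetical usage; requires a configured RhodeCode session.
from rhodecode.model.db import User

user = User.get_from_cs_author('John Doe <john@example.com>')
if user is None:
    # neither the email nor the username part matched a known account
    pass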
835 835 def update_userdata(self, **kwargs):
836 836 usr = self
837 837 old = usr.user_data
838 838 old.update(**kwargs)
839 839 usr.user_data = old
840 840 Session().add(usr)
841 841 log.debug('updated userdata with %s', kwargs)
842 842
843 843 def update_lastlogin(self):
844 844 """Update user lastlogin"""
845 845 self.last_login = datetime.datetime.now()
846 846 Session().add(self)
847 847 log.debug('updated user %s lastlogin', self.username)
848 848
849 849 def update_lastactivity(self):
850 850 """Update user lastactivity"""
851 851 self.last_activity = datetime.datetime.now()
852 852 Session().add(self)
853 853 log.debug('updated user %s lastactivity', self.username)
854 854
855 855 def update_password(self, new_password):
856 856 from rhodecode.lib.auth import get_crypt_password
857 857
858 858 self.password = get_crypt_password(new_password)
859 859 Session().add(self)
860 860
861 861 @classmethod
862 862 def get_first_super_admin(cls):
863 863 user = User.query().filter(User.admin == true()).first()
864 864 if user is None:
865 865 raise Exception('FATAL: Missing administrative account!')
866 866 return user
867 867
868 868 @classmethod
869 869 def get_all_super_admins(cls):
870 870 """
871 871 Returns all admin accounts sorted by username
872 872 """
873 873 return User.query().filter(User.admin == true())\
874 874 .order_by(User.username.asc()).all()
875 875
876 876 @classmethod
877 877 def get_default_user(cls, cache=False, refresh=False):
878 878 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
879 879 if user is None:
880 880 raise Exception('FATAL: Missing default account!')
881 881 if refresh:
882 882 # The default user might be based on outdated state which
883 883 # has been loaded from the cache.
884 884 # A call to refresh() ensures that the
885 885 # latest state from the database is used.
886 886 Session().refresh(user)
887 887 return user
888 888
889 889 def _get_default_perms(self, user, suffix=''):
890 890 from rhodecode.model.permission import PermissionModel
891 891 return PermissionModel().get_default_perms(user.user_perms, suffix)
892 892
893 893 def get_default_perms(self, suffix=''):
894 894 return self._get_default_perms(self, suffix)
895 895
896 896 def get_api_data(self, include_secrets=False, details='full'):
897 897 """
898 898 Common function for generating user related data for API
899 899
900 900 :param include_secrets: By default secrets in the API data will be replaced
901 901 by a placeholder value to prevent exposing this data by accident. In case
902 902 this data shall be exposed, set this flag to ``True``.
903 903
904 904 :param details: details can be 'basic|full' basic gives only a subset of
905 905 the available user information that includes user_id, name and emails.
906 906 """
907 907 user = self
908 908 user_data = self.user_data
909 909 data = {
910 910 'user_id': user.user_id,
911 911 'username': user.username,
912 912 'firstname': user.name,
913 913 'lastname': user.lastname,
914 914 'email': user.email,
915 915 'emails': user.emails,
916 916 }
917 917 if details == 'basic':
918 918 return data
919 919
920 920 api_key_length = 40
921 921 api_key_replacement = '*' * api_key_length
922 922
923 923 extras = {
924 924 'api_keys': [api_key_replacement],
925 925 'auth_tokens': [api_key_replacement],
926 926 'active': user.active,
927 927 'admin': user.admin,
928 928 'extern_type': user.extern_type,
929 929 'extern_name': user.extern_name,
930 930 'last_login': user.last_login,
931 931 'last_activity': user.last_activity,
932 932 'ip_addresses': user.ip_addresses,
933 933 'language': user_data.get('language')
934 934 }
935 935 data.update(extras)
936 936
937 937 if include_secrets:
938 938 data['api_keys'] = user.auth_tokens
939 939 data['auth_tokens'] = user.extra_auth_tokens
940 940 return data
941 941
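A sketch of the two detail levels described in the docstring, assuming `user` is a loaded `User` instance; token values stay masked unless `include_secrets=True` is passed:

# Hypothetical usage; `user` is assumed to be a User instance from the session.
basic = user.get_api_data(details='basic')    # user_id, username, names, emails only
full = user.get_api_data()                    # adds flags, masked auth tokens, etc.
secret = user.get_api_data(include_secrets=True)  # real token values included
assert 'auth_tokens' not in basic and 'auth_tokens' in full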
942 942 def __json__(self):
943 943 data = {
944 944 'full_name': self.full_name,
945 945 'full_name_or_username': self.full_name_or_username,
946 946 'short_contact': self.short_contact,
947 947 'full_contact': self.full_contact,
948 948 }
949 949 data.update(self.get_api_data())
950 950 return data
951 951
952 952
953 953 class UserApiKeys(Base, BaseModel):
954 954 __tablename__ = 'user_api_keys'
955 955 __table_args__ = (
956 956 Index('uak_api_key_idx', 'api_key'),
957 957 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
958 958 UniqueConstraint('api_key'),
959 959 {'extend_existing': True, 'mysql_engine': 'InnoDB',
960 960 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
961 961 )
962 962 __mapper_args__ = {}
963 963
964 964 # ApiKey role
965 965 ROLE_ALL = 'token_role_all'
966 966 ROLE_HTTP = 'token_role_http'
967 967 ROLE_VCS = 'token_role_vcs'
968 968 ROLE_API = 'token_role_api'
969 969 ROLE_FEED = 'token_role_feed'
970 970 ROLE_PASSWORD_RESET = 'token_password_reset'
971 971
972 972 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
973 973
974 974 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
975 975 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
976 976 api_key = Column("api_key", String(255), nullable=False, unique=True)
977 977 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
978 978 expires = Column('expires', Float(53), nullable=False)
979 979 role = Column('role', String(255), nullable=True)
980 980 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
981 981
982 982 # scope columns
983 983 repo_id = Column(
984 984 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
985 985 nullable=True, unique=None, default=None)
986 986 repo = relationship('Repository', lazy='joined')
987 987
988 988 repo_group_id = Column(
989 989 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
990 990 nullable=True, unique=None, default=None)
991 991 repo_group = relationship('RepoGroup', lazy='joined')
992 992
993 993 user = relationship('User', lazy='joined')
994 994
995 995 def __unicode__(self):
996 996 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
997 997
998 998 def __json__(self):
999 999 data = {
1000 1000 'auth_token': self.api_key,
1001 1001 'role': self.role,
1002 1002 'scope': self.scope_humanized,
1003 1003 'expired': self.expired
1004 1004 }
1005 1005 return data
1006 1006
1007 1007 @property
1008 1008 def expired(self):
1009 1009 if self.expires == -1:
1010 1010 return False
1011 1011 return time.time() > self.expires
1012 1012
1013 1013 @classmethod
1014 1014 def _get_role_name(cls, role):
1015 1015 return {
1016 1016 cls.ROLE_ALL: _('all'),
1017 1017 cls.ROLE_HTTP: _('http/web interface'),
1018 1018 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1019 1019 cls.ROLE_API: _('api calls'),
1020 1020 cls.ROLE_FEED: _('feed access'),
1021 1021 }.get(role, role)
1022 1022
1023 1023 @property
1024 1024 def role_humanized(self):
1025 1025 return self._get_role_name(self.role)
1026 1026
1027 1027 def _get_scope(self):
1028 1028 if self.repo:
1029 1029 return repr(self.repo)
1030 1030 if self.repo_group:
1031 1031 return repr(self.repo_group) + ' (recursive)'
1032 1032 return 'global'
1033 1033
1034 1034 @property
1035 1035 def scope_humanized(self):
1036 1036 return self._get_scope()
1037 1037
1038 1038
1039 1039 class UserEmailMap(Base, BaseModel):
1040 1040 __tablename__ = 'user_email_map'
1041 1041 __table_args__ = (
1042 1042 Index('uem_email_idx', 'email'),
1043 1043 UniqueConstraint('email'),
1044 1044 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1045 1045 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1046 1046 )
1047 1047 __mapper_args__ = {}
1048 1048
1049 1049 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1050 1050 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1051 1051 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1052 1052 user = relationship('User', lazy='joined')
1053 1053
1054 1054 @validates('_email')
1055 1055 def validate_email(self, key, email):
1056 1056 # check if this email is not main one
1057 1057 main_email = Session().query(User).filter(User.email == email).scalar()
1058 1058 if main_email is not None:
1059 1059 raise AttributeError('email %s is present in user table' % email)
1060 1060 return email
1061 1061
1062 1062 @hybrid_property
1063 1063 def email(self):
1064 1064 return self._email
1065 1065
1066 1066 @email.setter
1067 1067 def email(self, val):
1068 1068 self._email = val.lower() if val else None
1069 1069
1070 1070
1071 1071 class UserIpMap(Base, BaseModel):
1072 1072 __tablename__ = 'user_ip_map'
1073 1073 __table_args__ = (
1074 1074 UniqueConstraint('user_id', 'ip_addr'),
1075 1075 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1076 1076 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1077 1077 )
1078 1078 __mapper_args__ = {}
1079 1079
1080 1080 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1081 1081 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1082 1082 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1083 1083 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1084 1084 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1085 1085 user = relationship('User', lazy='joined')
1086 1086
1087 1087 @classmethod
1088 1088 def _get_ip_range(cls, ip_addr):
1089 1089 net = ipaddress.ip_network(ip_addr, strict=False)
1090 1090 return [str(net.network_address), str(net.broadcast_address)]
1091 1091
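`_get_ip_range` expands a single address or CIDR entry into its first and last address using the stdlib/backported `ipaddress` module; a standalone example:

import ipaddress

net = ipaddress.ip_network(u'192.168.1.0/24', strict=False)
print(str(net.network_address), str(net.broadcast_address))
# -> 192.168.1.0 192.168.1.255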
1092 1092 def __json__(self):
1093 1093 return {
1094 1094 'ip_addr': self.ip_addr,
1095 1095 'ip_range': self._get_ip_range(self.ip_addr),
1096 1096 }
1097 1097
1098 1098 def __unicode__(self):
1099 1099 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1100 1100 self.user_id, self.ip_addr)
1101 1101
1102 1102
1103 1103 class UserLog(Base, BaseModel):
1104 1104 __tablename__ = 'user_logs'
1105 1105 __table_args__ = (
1106 1106 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1107 1107 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1108 1108 )
1109 1109 VERSION_1 = 'v1'
1110 1110 VERSION_2 = 'v2'
1111 1111 VERSIONS = [VERSION_1, VERSION_2]
1112 1112
1113 1113 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1114 1114 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1115 1115 username = Column("username", String(255), nullable=True, unique=None, default=None)
1116 1116 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
1117 1117 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1118 1118 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1119 1119 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1120 1120 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1121 1121
1122 1122 version = Column("version", String(255), nullable=True, default=VERSION_1)
1123 1123 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
1124 1124 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
1125 1125
1126 1126 def __unicode__(self):
1127 1127 return u"<%s('id:%s:%s')>" % (
1128 1128 self.__class__.__name__, self.repository_name, self.action)
1129 1129
1130 1130 def __json__(self):
1131 1131 return {
1132 1132 'user_id': self.user_id,
1133 1133 'username': self.username,
1134 1134 'repository_id': self.repository_id,
1135 1135 'repository_name': self.repository_name,
1136 1136 'user_ip': self.user_ip,
1137 1137 'action_date': self.action_date,
1138 1138 'action': self.action,
1139 1139 }
1140 1140
1141 1141 @property
1142 1142 def action_as_day(self):
1143 1143 return datetime.date(*self.action_date.timetuple()[:3])
1144 1144
1145 1145 user = relationship('User')
1146 1146 repository = relationship('Repository', cascade='')
1147 1147
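The journal keeps the legacy free-form `action` string for v1 rows and adds structured `user_data`/`action_data` JSON columns for v2 rows. A hypothetical query sketch, assuming a configured session:

# Hypothetical usage; requires a configured RhodeCode session.
from rhodecode.model.db import UserLog

recent = UserLog.query().order_by(UserLog.action_date.desc()).limit(10).all()
for entry in recent:
    # v2 entries carry a structured JSON payload next to the action name
    print(entry.version, entry.action, entry.action_data)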
1148 1148
1149 1149 class UserGroup(Base, BaseModel):
1150 1150 __tablename__ = 'users_groups'
1151 1151 __table_args__ = (
1152 1152 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1153 1153 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1154 1154 )
1155 1155
1156 1156 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1157 1157 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1158 1158 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1159 1159 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1160 1160 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1161 1161 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1162 1162 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1163 1163 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1164 1164
1165 1165 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1166 1166 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1167 1167 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1168 1168 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1169 1169 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1170 1170 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1171 1171
1172 1172 user = relationship('User')
1173 1173
1174 1174 @hybrid_property
1175 1175 def group_data(self):
1176 1176 if not self._group_data:
1177 1177 return {}
1178 1178
1179 1179 try:
1180 1180 return json.loads(self._group_data)
1181 1181 except TypeError:
1182 1182 return {}
1183 1183
1184 1184 @group_data.setter
1185 1185 def group_data(self, val):
1186 1186 try:
1187 1187 self._group_data = json.dumps(val)
1188 1188 except Exception:
1189 1189 log.error(traceback.format_exc())
1190 1190
1191 1191 def __unicode__(self):
1192 1192 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1193 1193 self.users_group_id,
1194 1194 self.users_group_name)
1195 1195
1196 1196 @classmethod
1197 1197 def get_by_group_name(cls, group_name, cache=False,
1198 1198 case_insensitive=False):
1199 1199 if case_insensitive:
1200 1200 q = cls.query().filter(func.lower(cls.users_group_name) ==
1201 1201 func.lower(group_name))
1202 1202
1203 1203 else:
1204 1204 q = cls.query().filter(cls.users_group_name == group_name)
1205 1205 if cache:
1206 1206 q = q.options(
1207 1207 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1208 1208 return q.scalar()
1209 1209
1210 1210 @classmethod
1211 1211 def get(cls, user_group_id, cache=False):
1212 1212 user_group = cls.query()
1213 1213 if cache:
1214 1214 user_group = user_group.options(
1215 1215 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1216 1216 return user_group.get(user_group_id)
1217 1217
1218 1218 def permissions(self, with_admins=True, with_owner=True):
1219 1219 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1220 1220 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1221 1221 joinedload(UserUserGroupToPerm.user),
1222 1222 joinedload(UserUserGroupToPerm.permission),)
1223 1223
1224 1224 # get owners, admins and their permissions. We rewrite the sqlalchemy
1225 1225 # objects into plain attribute dicts because the sqlalchemy session keeps
1226 1226 # a global reference, so changing one object would propagate to all
1227 1227 # others. This means that if an admin is also the owner, setting
1228 1228 # admin_row on one record would otherwise leak into the other
1229 1229 perm_rows = []
1230 1230 for _usr in q.all():
1231 1231 usr = AttributeDict(_usr.user.get_dict())
1232 1232 usr.permission = _usr.permission.permission_name
1233 1233 perm_rows.append(usr)
1234 1234
1235 1235 # filter the perm rows by 'default' first and then sort them by
1236 1236 # admin,write,read,none permissions sorted again alphabetically in
1237 1237 # each group
1238 1238 perm_rows = sorted(perm_rows, key=display_sort)
1239 1239
1240 1240 _admin_perm = 'usergroup.admin'
1241 1241 owner_row = []
1242 1242 if with_owner:
1243 1243 usr = AttributeDict(self.user.get_dict())
1244 1244 usr.owner_row = True
1245 1245 usr.permission = _admin_perm
1246 1246 owner_row.append(usr)
1247 1247
1248 1248 super_admin_rows = []
1249 1249 if with_admins:
1250 1250 for usr in User.get_all_super_admins():
1251 1251 # if this admin is also owner, don't double the record
1252 1252 if usr.user_id == owner_row[0].user_id:
1253 1253 owner_row[0].admin_row = True
1254 1254 else:
1255 1255 usr = AttributeDict(usr.get_dict())
1256 1256 usr.admin_row = True
1257 1257 usr.permission = _admin_perm
1258 1258 super_admin_rows.append(usr)
1259 1259
1260 1260 return super_admin_rows + owner_row + perm_rows
1261 1261
1262 1262 def permission_user_groups(self):
1263 1263 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1264 1264 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1265 1265 joinedload(UserGroupUserGroupToPerm.target_user_group),
1266 1266 joinedload(UserGroupUserGroupToPerm.permission),)
1267 1267
1268 1268 perm_rows = []
1269 1269 for _user_group in q.all():
1270 1270 usr = AttributeDict(_user_group.user_group.get_dict())
1271 1271 usr.permission = _user_group.permission.permission_name
1272 1272 perm_rows.append(usr)
1273 1273
1274 1274 return perm_rows
1275 1275
1276 1276 def _get_default_perms(self, user_group, suffix=''):
1277 1277 from rhodecode.model.permission import PermissionModel
1278 1278 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1279 1279
1280 1280 def get_default_perms(self, suffix=''):
1281 1281 return self._get_default_perms(self, suffix)
1282 1282
1283 1283 def get_api_data(self, with_group_members=True, include_secrets=False):
1284 1284 """
1285 1285 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1286 1286 basically forwarded.
1287 1287
1288 1288 """
1289 1289 user_group = self
1290 1290 data = {
1291 1291 'users_group_id': user_group.users_group_id,
1292 1292 'group_name': user_group.users_group_name,
1293 1293 'group_description': user_group.user_group_description,
1294 1294 'active': user_group.users_group_active,
1295 1295 'owner': user_group.user.username,
1296 1296 'owner_email': user_group.user.email,
1297 1297 }
1298 1298
1299 1299 if with_group_members:
1300 1300 users = []
1301 1301 for user in user_group.members:
1302 1302 user = user.user
1303 1303 users.append(user.get_api_data(include_secrets=include_secrets))
1304 1304 data['users'] = users
1305 1305
1306 1306 return data
1307 1307
1308 1308
1309 1309 class UserGroupMember(Base, BaseModel):
1310 1310 __tablename__ = 'users_groups_members'
1311 1311 __table_args__ = (
1312 1312 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1313 1313 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1314 1314 )
1315 1315
1316 1316 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1317 1317 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1318 1318 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1319 1319
1320 1320 user = relationship('User', lazy='joined')
1321 1321 users_group = relationship('UserGroup')
1322 1322
1323 1323 def __init__(self, gr_id='', u_id=''):
1324 1324 self.users_group_id = gr_id
1325 1325 self.user_id = u_id
1326 1326
1327 1327
1328 1328 class RepositoryField(Base, BaseModel):
1329 1329 __tablename__ = 'repositories_fields'
1330 1330 __table_args__ = (
1331 1331 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1332 1332 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1333 1333 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1334 1334 )
1335 1335 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1336 1336
1337 1337 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1338 1338 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1339 1339 field_key = Column("field_key", String(250))
1340 1340 field_label = Column("field_label", String(1024), nullable=False)
1341 1341 field_value = Column("field_value", String(10000), nullable=False)
1342 1342 field_desc = Column("field_desc", String(1024), nullable=False)
1343 1343 field_type = Column("field_type", String(255), nullable=False, unique=None)
1344 1344 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1345 1345
1346 1346 repository = relationship('Repository')
1347 1347
1348 1348 @property
1349 1349 def field_key_prefixed(self):
1350 1350 return '%s%s' % (self.PREFIX, self.field_key)
1351 1351
1352 1352 @classmethod
1353 1353 def un_prefix_key(cls, key):
1354 1354 if key.startswith(cls.PREFIX):
1355 1355 return key[len(cls.PREFIX):]
1356 1356 return key
1357 1357
1358 1358 @classmethod
1359 1359 def get_by_key_name(cls, key, repo):
1360 1360 row = cls.query()\
1361 1361 .filter(cls.repository == repo)\
1362 1362 .filter(cls.field_key == key).scalar()
1363 1363 return row
1364 1364
1365 1365
1366 1366 class Repository(Base, BaseModel):
1367 1367 __tablename__ = 'repositories'
1368 1368 __table_args__ = (
1369 1369 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1370 1370 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1371 1371 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1372 1372 )
1373 1373 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1374 1374 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1375 1375
1376 1376 STATE_CREATED = 'repo_state_created'
1377 1377 STATE_PENDING = 'repo_state_pending'
1378 1378 STATE_ERROR = 'repo_state_error'
1379 1379
1380 1380 LOCK_AUTOMATIC = 'lock_auto'
1381 1381 LOCK_API = 'lock_api'
1382 1382 LOCK_WEB = 'lock_web'
1383 1383 LOCK_PULL = 'lock_pull'
1384 1384
1385 1385 NAME_SEP = URL_SEP
1386 1386
1387 1387 repo_id = Column(
1388 1388 "repo_id", Integer(), nullable=False, unique=True, default=None,
1389 1389 primary_key=True)
1390 1390 _repo_name = Column(
1391 1391 "repo_name", Text(), nullable=False, default=None)
1392 1392 _repo_name_hash = Column(
1393 1393 "repo_name_hash", String(255), nullable=False, unique=True)
1394 1394 repo_state = Column("repo_state", String(255), nullable=True)
1395 1395
1396 1396 clone_uri = Column(
1397 1397 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1398 1398 default=None)
1399 1399 repo_type = Column(
1400 1400 "repo_type", String(255), nullable=False, unique=False, default=None)
1401 1401 user_id = Column(
1402 1402 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1403 1403 unique=False, default=None)
1404 1404 private = Column(
1405 1405 "private", Boolean(), nullable=True, unique=None, default=None)
1406 1406 enable_statistics = Column(
1407 1407 "statistics", Boolean(), nullable=True, unique=None, default=True)
1408 1408 enable_downloads = Column(
1409 1409 "downloads", Boolean(), nullable=True, unique=None, default=True)
1410 1410 description = Column(
1411 1411 "description", String(10000), nullable=True, unique=None, default=None)
1412 1412 created_on = Column(
1413 1413 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1414 1414 default=datetime.datetime.now)
1415 1415 updated_on = Column(
1416 1416 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1417 1417 default=datetime.datetime.now)
1418 1418 _landing_revision = Column(
1419 1419 "landing_revision", String(255), nullable=False, unique=False,
1420 1420 default=None)
1421 1421 enable_locking = Column(
1422 1422 "enable_locking", Boolean(), nullable=False, unique=None,
1423 1423 default=False)
1424 1424 _locked = Column(
1425 1425 "locked", String(255), nullable=True, unique=False, default=None)
1426 1426 _changeset_cache = Column(
1427 1427 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1428 1428
1429 1429 fork_id = Column(
1430 1430 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1431 1431 nullable=True, unique=False, default=None)
1432 1432 group_id = Column(
1433 1433 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1434 1434 unique=False, default=None)
1435 1435
1436 1436 user = relationship('User', lazy='joined')
1437 1437 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1438 1438 group = relationship('RepoGroup', lazy='joined')
1439 1439 repo_to_perm = relationship(
1440 1440 'UserRepoToPerm', cascade='all',
1441 1441 order_by='UserRepoToPerm.repo_to_perm_id')
1442 1442 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1443 1443 stats = relationship('Statistics', cascade='all', uselist=False)
1444 1444
1445 1445 followers = relationship(
1446 1446 'UserFollowing',
1447 1447 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1448 1448 cascade='all')
1449 1449 extra_fields = relationship(
1450 1450 'RepositoryField', cascade="all, delete, delete-orphan")
1451 1451 logs = relationship('UserLog')
1452 1452 comments = relationship(
1453 1453 'ChangesetComment', cascade="all, delete, delete-orphan")
1454 1454 pull_requests_source = relationship(
1455 1455 'PullRequest',
1456 1456 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1457 1457 cascade="all, delete, delete-orphan")
1458 1458 pull_requests_target = relationship(
1459 1459 'PullRequest',
1460 1460 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1461 1461 cascade="all, delete, delete-orphan")
1462 1462 ui = relationship('RepoRhodeCodeUi', cascade="all")
1463 1463 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1464 1464 integrations = relationship('Integration',
1465 1465 cascade="all, delete, delete-orphan")
1466 1466
1467 1467 def __unicode__(self):
1468 1468 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1469 1469 safe_unicode(self.repo_name))
1470 1470
1471 1471 @hybrid_property
1472 1472 def landing_rev(self):
1473 1473 # always should return [rev_type, rev]
1474 1474 if self._landing_revision:
1475 1475 _rev_info = self._landing_revision.split(':')
1476 1476 if len(_rev_info) < 2:
1477 1477 _rev_info.insert(0, 'rev')
1478 1478 return [_rev_info[0], _rev_info[1]]
1479 1479 return [None, None]
1480 1480
1481 1481 @landing_rev.setter
1482 1482 def landing_rev(self, val):
1483 1483 if ':' not in val:
1484 1484 raise ValueError('value must be delimited with `:` and consist '
1485 1485 'of <rev_type>:<rev>, got %s instead' % val)
1486 1486 self._landing_revision = val
1487 1487
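The landing revision is persisted as a single `<rev_type>:<rev>` string and split on read, so for example:

# Hypothetical usage; `repo` is assumed to be a Repository instance.
repo.landing_rev = 'branch:default'   # stored as 'branch:default'
print(repo.landing_rev)               # -> ['branch', 'default']
repo.landing_rev = 'tip'              # raises ValueError: missing ':' delimiter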
1488 1488 @hybrid_property
1489 1489 def locked(self):
1490 1490 if self._locked:
1491 1491 user_id, timelocked, reason = self._locked.split(':')
1492 1492 lock_values = int(user_id), timelocked, reason
1493 1493 else:
1494 1494 lock_values = [None, None, None]
1495 1495 return lock_values
1496 1496
1497 1497 @locked.setter
1498 1498 def locked(self, val):
1499 1499 if val and isinstance(val, (list, tuple)):
1500 1500 self._locked = ':'.join(map(str, val))
1501 1501 else:
1502 1502 self._locked = None
1503 1503
1504 1504 @hybrid_property
1505 1505 def changeset_cache(self):
1506 1506 from rhodecode.lib.vcs.backends.base import EmptyCommit
1507 1507 dummy = EmptyCommit().__json__()
1508 1508 if not self._changeset_cache:
1509 1509 return dummy
1510 1510 try:
1511 1511 return json.loads(self._changeset_cache)
1512 1512 except TypeError:
1513 1513 return dummy
1514 1514 except Exception:
1515 1515 log.error(traceback.format_exc())
1516 1516 return dummy
1517 1517
1518 1518 @changeset_cache.setter
1519 1519 def changeset_cache(self, val):
1520 1520 try:
1521 1521 self._changeset_cache = json.dumps(val)
1522 1522 except Exception:
1523 1523 log.error(traceback.format_exc())
1524 1524
1525 1525 @hybrid_property
1526 1526 def repo_name(self):
1527 1527 return self._repo_name
1528 1528
1529 1529 @repo_name.setter
1530 1530 def repo_name(self, value):
1531 1531 self._repo_name = value
1532 1532 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1533 1533
1534 1534 @classmethod
1535 1535 def normalize_repo_name(cls, repo_name):
1536 1536 """
1537 1537 Normalizes os specific repo_name to the format internally stored inside
1538 1538 database using URL_SEP
1539 1539
1540 1540 :param cls:
1541 1541 :param repo_name:
1542 1542 """
1543 1543 return cls.NAME_SEP.join(repo_name.split(os.sep))
1544 1544
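Repository names are stored with URL separators regardless of the host OS; the same normalization as a standalone snippet (NAME_SEP assumed to be '/', as URL_SEP is in the real module):

import os

NAME_SEP = '/'  # assumption: mirrors URL_SEP used by the model

def normalize_repo_name(repo_name):
    return NAME_SEP.join(repo_name.split(os.sep))

# On Windows this turns 'group\repo' into 'group/repo';
# on POSIX the name is already in the stored form.
print(normalize_repo_name(os.sep.join(['group', 'repo'])))  # -> group/repo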
1545 1545 @classmethod
1546 1546 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1547 1547 session = Session()
1548 1548 q = session.query(cls).filter(cls.repo_name == repo_name)
1549 1549
1550 1550 if cache:
1551 1551 if identity_cache:
1552 1552 val = cls.identity_cache(session, 'repo_name', repo_name)
1553 1553 if val:
1554 1554 return val
1555 1555 else:
1556 1556 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1557 1557 q = q.options(
1558 1558 FromCache("sql_cache_short", cache_key))
1559 1559
1560 1560 return q.scalar()
1561 1561
1562 1562 @classmethod
1563 1563 def get_by_full_path(cls, repo_full_path):
1564 1564 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1565 1565 repo_name = cls.normalize_repo_name(repo_name)
1566 1566 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1567 1567
1568 1568 @classmethod
1569 1569 def get_repo_forks(cls, repo_id):
1570 1570 return cls.query().filter(Repository.fork_id == repo_id)
1571 1571
1572 1572 @classmethod
1573 1573 def base_path(cls):
1574 1574 """
1575 1575 Returns base path where all repos are stored
1576 1576
1577 1577 :param cls:
1578 1578 """
1579 1579 q = Session().query(RhodeCodeUi)\
1580 1580 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1581 1581 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1582 1582 return q.one().ui_value
1583 1583
1584 1584 @classmethod
1585 1585 def is_valid(cls, repo_name):
1586 1586 """
1587 1587 returns True if given repo name is a valid filesystem repository
1588 1588
1589 1589 :param cls:
1590 1590 :param repo_name:
1591 1591 """
1592 1592 from rhodecode.lib.utils import is_valid_repo
1593 1593
1594 1594 return is_valid_repo(repo_name, cls.base_path())
1595 1595
1596 1596 @classmethod
1597 1597 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1598 1598 case_insensitive=True):
1599 1599 q = Repository.query()
1600 1600
1601 1601 if not isinstance(user_id, Optional):
1602 1602 q = q.filter(Repository.user_id == user_id)
1603 1603
1604 1604 if not isinstance(group_id, Optional):
1605 1605 q = q.filter(Repository.group_id == group_id)
1606 1606
1607 1607 if case_insensitive:
1608 1608 q = q.order_by(func.lower(Repository.repo_name))
1609 1609 else:
1610 1610 q = q.order_by(Repository.repo_name)
1611 1611 return q.all()
1612 1612
1613 1613 @property
1614 1614 def forks(self):
1615 1615 """
1616 1616 Return forks of this repo
1617 1617 """
1618 1618 return Repository.get_repo_forks(self.repo_id)
1619 1619
1620 1620 @property
1621 1621 def parent(self):
1622 1622 """
1623 1623 Returns fork parent
1624 1624 """
1625 1625 return self.fork
1626 1626
1627 1627 @property
1628 1628 def just_name(self):
1629 1629 return self.repo_name.split(self.NAME_SEP)[-1]
1630 1630
1631 1631 @property
1632 1632 def groups_with_parents(self):
1633 1633 groups = []
1634 1634 if self.group is None:
1635 1635 return groups
1636 1636
1637 1637 cur_gr = self.group
1638 1638 groups.insert(0, cur_gr)
1639 1639 while 1:
1640 1640 gr = getattr(cur_gr, 'parent_group', None)
1641 1641 cur_gr = cur_gr.parent_group
1642 1642 if gr is None:
1643 1643 break
1644 1644 groups.insert(0, gr)
1645 1645
1646 1646 return groups
1647 1647
1648 1648 @property
1649 1649 def groups_and_repo(self):
1650 1650 return self.groups_with_parents, self
1651 1651
1652 1652 @LazyProperty
1653 1653 def repo_path(self):
1654 1654 """
1655 1655 Returns the full base path for this repository, i.e. where it actually
1656 1656 exists on the filesystem
1657 1657 """
1658 1658 q = Session().query(RhodeCodeUi).filter(
1659 1659 RhodeCodeUi.ui_key == self.NAME_SEP)
1660 1660 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1661 1661 return q.one().ui_value
1662 1662
1663 1663 @property
1664 1664 def repo_full_path(self):
1665 1665 p = [self.repo_path]
1666 1666 # we need to split the name by / since this is how we store the
1667 1667 # names in the database, but that eventually needs to be converted
1668 1668 # into a valid system path
1669 1669 p += self.repo_name.split(self.NAME_SEP)
1670 1670 return os.path.join(*map(safe_unicode, p))
1671 1671
1672 1672 @property
1673 1673 def cache_keys(self):
1674 1674 """
1675 1675 Returns associated cache keys for that repo
1676 1676 """
1677 1677 return CacheKey.query()\
1678 1678 .filter(CacheKey.cache_args == self.repo_name)\
1679 1679 .order_by(CacheKey.cache_key)\
1680 1680 .all()
1681 1681
1682 1682 def get_new_name(self, repo_name):
1683 1683 """
1684 1684 returns new full repository name based on assigned group and new name
1685 1685
1686 1686 :param repo_name:
1687 1687 """
1688 1688 path_prefix = self.group.full_path_splitted if self.group else []
1689 1689 return self.NAME_SEP.join(path_prefix + [repo_name])
1690 1690
1691 1691 @property
1692 1692 def _config(self):
1693 1693 """
1694 1694 Returns db based config object.
1695 1695 """
1696 1696 from rhodecode.lib.utils import make_db_config
1697 1697 return make_db_config(clear_session=False, repo=self)
1698 1698
1699 1699 def permissions(self, with_admins=True, with_owner=True):
1700 1700 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1701 1701 q = q.options(joinedload(UserRepoToPerm.repository),
1702 1702 joinedload(UserRepoToPerm.user),
1703 1703 joinedload(UserRepoToPerm.permission),)
1704 1704
1705 1705 # get owners, admins and their permissions. We rewrite the sqlalchemy
1706 1706 # objects into plain attribute dicts because the sqlalchemy session keeps
1707 1707 # a global reference, so changing one object would propagate to all
1708 1708 # others. This means that if an admin is also the owner, setting
1709 1709 # admin_row on one record would otherwise leak into the other
1710 1710 perm_rows = []
1711 1711 for _usr in q.all():
1712 1712 usr = AttributeDict(_usr.user.get_dict())
1713 1713 usr.permission = _usr.permission.permission_name
1714 1714 perm_rows.append(usr)
1715 1715
1716 1716 # filter the perm rows by 'default' first and then sort them by
1717 1717 # admin,write,read,none permissions sorted again alphabetically in
1718 1718 # each group
1719 1719 perm_rows = sorted(perm_rows, key=display_sort)
1720 1720
1721 1721 _admin_perm = 'repository.admin'
1722 1722 owner_row = []
1723 1723 if with_owner:
1724 1724 usr = AttributeDict(self.user.get_dict())
1725 1725 usr.owner_row = True
1726 1726 usr.permission = _admin_perm
1727 1727 owner_row.append(usr)
1728 1728
1729 1729 super_admin_rows = []
1730 1730 if with_admins:
1731 1731 for usr in User.get_all_super_admins():
1732 1732 # if this admin is also owner, don't double the record
1733 1733 if usr.user_id == owner_row[0].user_id:
1734 1734 owner_row[0].admin_row = True
1735 1735 else:
1736 1736 usr = AttributeDict(usr.get_dict())
1737 1737 usr.admin_row = True
1738 1738 usr.permission = _admin_perm
1739 1739 super_admin_rows.append(usr)
1740 1740
1741 1741 return super_admin_rows + owner_row + perm_rows
1742 1742
1743 1743 def permission_user_groups(self):
1744 1744 q = UserGroupRepoToPerm.query().filter(
1745 1745 UserGroupRepoToPerm.repository == self)
1746 1746 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1747 1747 joinedload(UserGroupRepoToPerm.users_group),
1748 1748 joinedload(UserGroupRepoToPerm.permission),)
1749 1749
1750 1750 perm_rows = []
1751 1751 for _user_group in q.all():
1752 1752 usr = AttributeDict(_user_group.users_group.get_dict())
1753 1753 usr.permission = _user_group.permission.permission_name
1754 1754 perm_rows.append(usr)
1755 1755
1756 1756 return perm_rows
1757 1757
1758 1758 def get_api_data(self, include_secrets=False):
1759 1759 """
1760 1760 Common function for generating repo api data
1761 1761
1762 1762 :param include_secrets: See :meth:`User.get_api_data`.
1763 1763
1764 1764 """
1765 1765 # TODO: mikhail: Here there is an anti-pattern, we probably need to
1766 1766 # move this methods on models level.
1767 1767 from rhodecode.model.settings import SettingsModel
1768 1768 from rhodecode.model.repo import RepoModel
1769 1769
1770 1770 repo = self
1771 1771 _user_id, _time, _reason = self.locked
1772 1772
1773 1773 data = {
1774 1774 'repo_id': repo.repo_id,
1775 1775 'repo_name': repo.repo_name,
1776 1776 'repo_type': repo.repo_type,
1777 1777 'clone_uri': repo.clone_uri or '',
1778 1778 'url': RepoModel().get_url(self),
1779 1779 'private': repo.private,
1780 1780 'created_on': repo.created_on,
1781 1781 'description': repo.description,
1782 1782 'landing_rev': repo.landing_rev,
1783 1783 'owner': repo.user.username,
1784 1784 'fork_of': repo.fork.repo_name if repo.fork else None,
1785 1785 'fork_of_id': repo.fork.repo_id if repo.fork else None,
1786 1786 'enable_statistics': repo.enable_statistics,
1787 1787 'enable_locking': repo.enable_locking,
1788 1788 'enable_downloads': repo.enable_downloads,
1789 1789 'last_changeset': repo.changeset_cache,
1790 1790 'locked_by': User.get(_user_id).get_api_data(
1791 1791 include_secrets=include_secrets) if _user_id else None,
1792 1792 'locked_date': time_to_datetime(_time) if _time else None,
1793 1793 'lock_reason': _reason if _reason else None,
1794 1794 }
1795 1795
1796 1796 # TODO: mikhail: should be per-repo settings here
1797 1797 rc_config = SettingsModel().get_all_settings()
1798 1798 repository_fields = str2bool(
1799 1799 rc_config.get('rhodecode_repository_fields'))
1800 1800 if repository_fields:
1801 1801 for f in self.extra_fields:
1802 1802 data[f.field_key_prefixed] = f.field_value
1803 1803
1804 1804 return data
1805 1805
1806 1806 @classmethod
1807 1807 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1808 1808 if not lock_time:
1809 1809 lock_time = time.time()
1810 1810 if not lock_reason:
1811 1811 lock_reason = cls.LOCK_AUTOMATIC
1812 1812 repo.locked = [user_id, lock_time, lock_reason]
1813 1813 Session().add(repo)
1814 1814 Session().commit()
1815 1815
1816 1816 @classmethod
1817 1817 def unlock(cls, repo):
1818 1818 repo.locked = None
1819 1819 Session().add(repo)
1820 1820 Session().commit()
1821 1821
1822 1822 @classmethod
1823 1823 def getlock(cls, repo):
1824 1824 return repo.locked
1825 1825
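Locks are persisted behind the `locked` hybrid property as a `user_id:timestamp:reason` string; the class-level helpers wrap that with a session commit. A hedged usage sketch:

# Hypothetical usage; `repo` is a Repository and `user` a User instance.
Repository.lock(repo, user.user_id, lock_reason=Repository.LOCK_API)
user_id, locked_since, reason = Repository.getlock(repo)  # unpacks repo.locked
Repository.unlock(repo)                                   # clears the lock again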
1826 1826 def is_user_lock(self, user_id):
1827 1827 if self.locked[0]:
1828 1828 lock_user_id = safe_int(self.locked[0])
1829 1829 user_id = safe_int(user_id)
1830 1830 # both are ints, and they are equal
1831 1831 return all([lock_user_id, user_id]) and lock_user_id == user_id
1832 1832
1833 1833 return False
1834 1834
1835 1835 def get_locking_state(self, action, user_id, only_when_enabled=True):
1836 1836 """
1837 1837 Checks locking on this repository, if locking is enabled and lock is
1838 1838 present returns a tuple of make_lock, locked, locked_by.
1839 1839 make_lock can have 3 states: None (do nothing), True (make a lock)
1840 1840 and False (release a lock). This value is later propagated to hooks,
1841 1841 which do the locking. Think of it as a signal telling the hooks what to do.
1842 1842
1843 1843 """
1844 1844 # TODO: johbo: This is part of the business logic and should be moved
1845 1845 # into the RepositoryModel.
1846 1846
1847 1847 if action not in ('push', 'pull'):
1848 1848 raise ValueError("Invalid action value: %s" % repr(action))
1849 1849
1850 1850 # defines if locked error should be thrown to user
1851 1851 currently_locked = False
1852 1852 # defines if new lock should be made, tri-state
1853 1853 make_lock = None
1854 1854 repo = self
1855 1855 user = User.get(user_id)
1856 1856
1857 1857 lock_info = repo.locked
1858 1858
1859 1859 if repo and (repo.enable_locking or not only_when_enabled):
1860 1860 if action == 'push':
1861 1861 # check if it's already locked, and if it is, compare users
1862 1862 locked_by_user_id = lock_info[0]
1863 1863 if user.user_id == locked_by_user_id:
1864 1864 log.debug(
1865 1865 'Got `push` action from user %s, now unlocking', user)
1866 1866 # unlock if we have push from user who locked
1867 1867 make_lock = False
1868 1868 else:
1869 1869 # we're not the same user who locked, ban with
1870 1870 # code defined in settings (default is 423 HTTP Locked) !
1871 1871 log.debug('Repo %s is currently locked by %s', repo, user)
1872 1872 currently_locked = True
1873 1873 elif action == 'pull':
1874 1874 # [0] user [1] date
1875 1875 if lock_info[0] and lock_info[1]:
1876 1876 log.debug('Repo %s is currently locked by %s', repo, user)
1877 1877 currently_locked = True
1878 1878 else:
1879 1879 log.debug('Setting lock on repo %s by %s', repo, user)
1880 1880 make_lock = True
1881 1881
1882 1882 else:
1883 1883 log.debug('Repository %s does not have locking enabled', repo)
1884 1884
1885 1885 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
1886 1886 make_lock, currently_locked, lock_info)
1887 1887
1888 1888 from rhodecode.lib.auth import HasRepoPermissionAny
1889 1889 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
1890 1890 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
1891 1891 # if we don't have at least write permission we cannot make a lock
1892 1892 log.debug('lock state reset back to FALSE due to lack '
1893 1893 'of at least write permission')
1894 1894 make_lock = False
1895 1895
1896 1896 return make_lock, currently_locked, lock_info
1897 1897
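The tri-state `make_lock` value returned above is what the hooks act on; a sketch of interpreting the result, following the docstring:

# Hypothetical usage; `repo` is a Repository and `user_id` a valid user id.
make_lock, currently_locked, lock_info = repo.get_locking_state('pull', user_id)
if make_lock:
    pass          # hooks should place a new lock for this user
elif make_lock is False:
    pass          # hooks should release the existing lock
# make_lock is None -> leave the lock state untouched
if currently_locked:
    pass          # reject the operation (423 HTTP Locked by default)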
1898 1898 @property
1899 1899 def last_db_change(self):
1900 1900 return self.updated_on
1901 1901
1902 1902 @property
1903 1903 def clone_uri_hidden(self):
1904 1904 clone_uri = self.clone_uri
1905 1905 if clone_uri:
1906 1906 import urlobject
1907 1907 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
1908 1908 if url_obj.password:
1909 1909 clone_uri = url_obj.with_password('*****')
1910 1910 return clone_uri
1911 1911
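`clone_uri_hidden` only masks the password component of the stored clone URI; a sketch of the masking with the same `urlobject` call used above (the printed form is an assumption):

# Sketch only; assumes the urlobject package used by the model above.
import urlobject

url_obj = urlobject.URLObject('https://user:secret@example.com/repo')
print(url_obj.with_password('*****'))
# expected: https://user:*****@example.com/repo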
1912 1912 def clone_url(self, **override):
1913 1913
1914 1914 uri_tmpl = None
1915 1915 if 'with_id' in override:
1916 1916 uri_tmpl = self.DEFAULT_CLONE_URI_ID
1917 1917 del override['with_id']
1918 1918
1919 1919 if 'uri_tmpl' in override:
1920 1920 uri_tmpl = override['uri_tmpl']
1921 1921 del override['uri_tmpl']
1922 1922
1923 1923 # we didn't override our tmpl from **overrides
1924 1924 if not uri_tmpl:
1925 1925 uri_tmpl = self.DEFAULT_CLONE_URI
1926 1926 try:
1927 1927 from pylons import tmpl_context as c
1928 1928 uri_tmpl = c.clone_uri_tmpl
1929 1929 except Exception:
1930 1930 # in any case if we call this outside of request context,
1931 1931 # ie, not having tmpl_context set up
1932 1932 pass
1933 1933
1934 1934 request = get_current_request()
1935 1935 return get_clone_url(request=request,
1936 1936 uri_tmpl=uri_tmpl,
1937 1937 repo_name=self.repo_name,
1938 1938 repo_id=self.repo_id, **override)
1939 1939
1940 1940 def set_state(self, state):
1941 1941 self.repo_state = state
1942 1942 Session().add(self)
1943 1943 #==========================================================================
1944 1944 # SCM PROPERTIES
1945 1945 #==========================================================================
1946 1946
1947 1947 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
1948 1948 return get_commit_safe(
1949 1949 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
1950 1950
1951 1951 def get_changeset(self, rev=None, pre_load=None):
1952 1952 warnings.warn("Use get_commit", DeprecationWarning)
1953 1953 commit_id = None
1954 1954 commit_idx = None
1955 1955 if isinstance(rev, basestring):
1956 1956 commit_id = rev
1957 1957 else:
1958 1958 commit_idx = rev
1959 1959 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
1960 1960 pre_load=pre_load)
1961 1961
1962 1962 def get_landing_commit(self):
1963 1963 """
1964 1964 Returns landing commit, or if that doesn't exist returns the tip
1965 1965 """
1966 1966 _rev_type, _rev = self.landing_rev
1967 1967 commit = self.get_commit(_rev)
1968 1968 if isinstance(commit, EmptyCommit):
1969 1969 return self.get_commit()
1970 1970 return commit
1971 1971
1972 1972 def update_commit_cache(self, cs_cache=None, config=None):
1973 1973 """
1974 1974 Update cache of last changeset for repository, keys should be::
1975 1975
1976 1976 short_id
1977 1977 raw_id
1978 1978 revision
1979 1979 parents
1980 1980 message
1981 1981 date
1982 1982 author
1983 1983
1984 1984 :param cs_cache:
1985 1985 """
1986 1986 from rhodecode.lib.vcs.backends.base import BaseChangeset
1987 1987 if cs_cache is None:
1988 1988 # use no-cache version here
1989 1989 scm_repo = self.scm_instance(cache=False, config=config)
1990 1990 if scm_repo:
1991 1991 cs_cache = scm_repo.get_commit(
1992 1992 pre_load=["author", "date", "message", "parents"])
1993 1993 else:
1994 1994 cs_cache = EmptyCommit()
1995 1995
1996 1996 if isinstance(cs_cache, BaseChangeset):
1997 1997 cs_cache = cs_cache.__json__()
1998 1998
1999 1999 def is_outdated(new_cs_cache):
2000 2000 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2001 2001 new_cs_cache['revision'] != self.changeset_cache['revision']):
2002 2002 return True
2003 2003 return False
2004 2004
2005 2005 # check if we have maybe already latest cached revision
2006 2006 if is_outdated(cs_cache) or not self.changeset_cache:
2007 2007 _default = datetime.datetime.fromtimestamp(0)
2008 2008 last_change = cs_cache.get('date') or _default
2009 2009 log.debug('updated repo %s with new cs cache %s',
2010 2010 self.repo_name, cs_cache)
2011 2011 self.updated_on = last_change
2012 2012 self.changeset_cache = cs_cache
2013 2013 Session().add(self)
2014 2014 Session().commit()
2015 2015 else:
2016 2016 log.debug('Skipping update_commit_cache for repo:`%s` '
2017 2017 'commit cache already up to date', self.repo_name)
2018 2018
2019 2019 @property
2020 2020 def tip(self):
2021 2021 return self.get_commit('tip')
2022 2022
2023 2023 @property
2024 2024 def author(self):
2025 2025 return self.tip.author
2026 2026
2027 2027 @property
2028 2028 def last_change(self):
2029 2029 return self.scm_instance().last_change
2030 2030
2031 2031 def get_comments(self, revisions=None):
2032 2032 """
2033 2033 Returns comments for this repository grouped by revisions
2034 2034
2035 2035 :param revisions: filter query by revisions only
2036 2036 """
2037 2037 cmts = ChangesetComment.query()\
2038 2038 .filter(ChangesetComment.repo == self)
2039 2039 if revisions:
2040 2040 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2041 2041 grouped = collections.defaultdict(list)
2042 2042 for cmt in cmts.all():
2043 2043 grouped[cmt.revision].append(cmt)
2044 2044 return grouped
2045 2045
2046 2046 def statuses(self, revisions=None):
2047 2047 """
2048 2048 Returns statuses for this repository
2049 2049
2050 2050 :param revisions: list of revisions to get statuses for
2051 2051 """
2052 2052 statuses = ChangesetStatus.query()\
2053 2053 .filter(ChangesetStatus.repo == self)\
2054 2054 .filter(ChangesetStatus.version == 0)
2055 2055
2056 2056 if revisions:
2057 2057 # Try doing the filtering in chunks to avoid hitting limits
2058 2058 size = 500
2059 2059 status_results = []
2060 2060 for chunk in xrange(0, len(revisions), size):
2061 2061 status_results += statuses.filter(
2062 2062 ChangesetStatus.revision.in_(
2063 2063 revisions[chunk: chunk+size])
2064 2064 ).all()
2065 2065 else:
2066 2066 status_results = statuses.all()
2067 2067
2068 2068 grouped = {}
2069 2069
2070 2070 # maybe we have open new pullrequest without a status?
2071 2071 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2072 2072 status_lbl = ChangesetStatus.get_status_lbl(stat)
2073 2073 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2074 2074 for rev in pr.revisions:
2075 2075 pr_id = pr.pull_request_id
2076 2076 pr_repo = pr.target_repo.repo_name
2077 2077 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2078 2078
2079 2079 for stat in status_results:
2080 2080 pr_id = pr_repo = None
2081 2081 if stat.pull_request:
2082 2082 pr_id = stat.pull_request.pull_request_id
2083 2083 pr_repo = stat.pull_request.target_repo.repo_name
2084 2084 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2085 2085 pr_id, pr_repo]
2086 2086 return grouped
2087 2087
2088 2088 # ==========================================================================
2089 2089 # SCM CACHE INSTANCE
2090 2090 # ==========================================================================
2091 2091
2092 2092 def scm_instance(self, **kwargs):
2093 2093 import rhodecode
2094 2094
2095 2095 # Passing a config will not hit the cache currently only used
2096 2096 # for repo2dbmapper
2097 2097 config = kwargs.pop('config', None)
2098 2098 cache = kwargs.pop('cache', None)
2099 2099 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2100 2100 # if cache is NOT defined use default global, else we have a full
2101 2101 # control over cache behaviour
2102 2102 if cache is None and full_cache and not config:
2103 2103 return self._get_instance_cached()
2104 2104 return self._get_instance(cache=bool(cache), config=config)
2105 2105
2106 2106 def _get_instance_cached(self):
2107 2107 @cache_region('long_term')
2108 2108 def _get_repo(cache_key):
2109 2109 return self._get_instance()
2110 2110
2111 2111 invalidator_context = CacheKey.repo_context_cache(
2112 2112 _get_repo, self.repo_name, None, thread_scoped=True)
2113 2113
2114 2114 with invalidator_context as context:
2115 2115 context.invalidate()
2116 2116 repo = context.compute()
2117 2117
2118 2118 return repo
2119 2119
2120 2120 def _get_instance(self, cache=True, config=None):
2121 2121 config = config or self._config
2122 2122 custom_wire = {
2123 2123 'cache': cache # controls the vcs.remote cache
2124 2124 }
2125 2125 repo = get_vcs_instance(
2126 2126 repo_path=safe_str(self.repo_full_path),
2127 2127 config=config,
2128 2128 with_wire=custom_wire,
2129 2129 create=False,
2130 2130 _vcs_alias=self.repo_type)
2131 2131
2132 2132 return repo
2133 2133
2134 2134 def __json__(self):
2135 2135 return {'landing_rev': self.landing_rev}
2136 2136
2137 2137 def get_dict(self):
2138 2138
2139 2139 # Since we transformed `repo_name` to a hybrid property, we need to
2140 2140 # keep compatibility with the code which uses `repo_name` field.
2141 2141
2142 2142 result = super(Repository, self).get_dict()
2143 2143 result['repo_name'] = result.pop('_repo_name', None)
2144 2144 return result
2145 2145
2146 2146
2147 2147 class RepoGroup(Base, BaseModel):
2148 2148 __tablename__ = 'groups'
2149 2149 __table_args__ = (
2150 2150 UniqueConstraint('group_name', 'group_parent_id'),
2151 2151 CheckConstraint('group_id != group_parent_id'),
2152 2152 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2153 2153 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2154 2154 )
2155 2155 __mapper_args__ = {'order_by': 'group_name'}
2156 2156
2157 2157 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2158 2158
2159 2159 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2160 2160 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2161 2161 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2162 2162 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2163 2163 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2164 2164 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2165 2165 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2166 2166 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2167 2167
2168 2168 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2169 2169 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2170 2170 parent_group = relationship('RepoGroup', remote_side=group_id)
2171 2171 user = relationship('User')
2172 2172 integrations = relationship('Integration',
2173 2173 cascade="all, delete, delete-orphan")
2174 2174
2175 2175 def __init__(self, group_name='', parent_group=None):
2176 2176 self.group_name = group_name
2177 2177 self.parent_group = parent_group
2178 2178
2179 2179 def __unicode__(self):
2180 2180 return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
2181 2181 self.group_name)
2182 2182
2183 2183 @classmethod
2184 2184 def _generate_choice(cls, repo_group):
2185 2185 from webhelpers.html import literal as _literal
2186 2186 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2187 2187 return repo_group.group_id, _name(repo_group.full_path_splitted)
2188 2188
2189 2189 @classmethod
2190 2190 def groups_choices(cls, groups=None, show_empty_group=True):
2191 2191 if not groups:
2192 2192 groups = cls.query().all()
2193 2193
2194 2194 repo_groups = []
2195 2195 if show_empty_group:
2196 2196 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2197 2197
2198 2198 repo_groups.extend([cls._generate_choice(x) for x in groups])
2199 2199
2200 2200 repo_groups = sorted(
2201 2201 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2202 2202 return repo_groups
2203 2203
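# Illustrative sketch, not part of the model: for two hypothetical groups
# 'docs' (group_id 3) and 'docs/api' (group_id 7), the generated select2
# choices would look roughly like:
#
#   RepoGroup.groups_choices()
#   # -> [(-1, u'-- No parent --'), (3, u'docs'), (7, u'docs/api')]
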
2204 2204 @classmethod
2205 2205 def url_sep(cls):
2206 2206 return URL_SEP
2207 2207
2208 2208 @classmethod
2209 2209 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2210 2210 if case_insensitive:
2211 2211 gr = cls.query().filter(func.lower(cls.group_name)
2212 2212 == func.lower(group_name))
2213 2213 else:
2214 2214 gr = cls.query().filter(cls.group_name == group_name)
2215 2215 if cache:
2216 2216 name_key = _hash_key(group_name)
2217 2217 gr = gr.options(
2218 2218 FromCache("sql_cache_short", "get_group_%s" % name_key))
2219 2219 return gr.scalar()
2220 2220
2221 2221 @classmethod
2222 2222 def get_user_personal_repo_group(cls, user_id):
2223 2223 user = User.get(user_id)
2224 2224 if user.username == User.DEFAULT_USER:
2225 2225 return None
2226 2226
2227 2227 return cls.query()\
2228 2228 .filter(cls.personal == true()) \
2229 2229 .filter(cls.user == user).scalar()
2230 2230
2231 2231 @classmethod
2232 2232 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2233 2233 case_insensitive=True):
2234 2234 q = RepoGroup.query()
2235 2235
2236 2236 if not isinstance(user_id, Optional):
2237 2237 q = q.filter(RepoGroup.user_id == user_id)
2238 2238
2239 2239 if not isinstance(group_id, Optional):
2240 2240 q = q.filter(RepoGroup.group_parent_id == group_id)
2241 2241
2242 2242 if case_insensitive:
2243 2243 q = q.order_by(func.lower(RepoGroup.group_name))
2244 2244 else:
2245 2245 q = q.order_by(RepoGroup.group_name)
2246 2246 return q.all()
2247 2247
2248 2248 @property
2249 2249 def parents(self):
2250 2250 parents_recursion_limit = 10
2251 2251 groups = []
2252 2252 if self.parent_group is None:
2253 2253 return groups
2254 2254 cur_gr = self.parent_group
2255 2255 groups.insert(0, cur_gr)
2256 2256 cnt = 0
2257 2257 while 1:
2258 2258 cnt += 1
2259 2259 gr = getattr(cur_gr, 'parent_group', None)
2260 2260 cur_gr = cur_gr.parent_group
2261 2261 if gr is None:
2262 2262 break
2263 2263 if cnt == parents_recursion_limit:
2264 2264 # this will prevent accidental infinite loops
2265 2265 log.error('more than %s parents found for group %s, stopping '
2266 2266 'recursive parent fetching', parents_recursion_limit, self)
2267 2267 break
2268 2268
2269 2269 groups.insert(0, gr)
2270 2270 return groups
2271 2271
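# Illustrative sketch, not part of the model: for a group named 'a/b/c'
# whose parent chain is 'a' -> 'a/b' -> 'a/b/c', the `parents` property
# returns [<RepoGroup 'a'>, <RepoGroup 'a/b'>], i.e. the top-most parent
# first and the group itself excluded.
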
2272 2272 @property
2273 2273 def children(self):
2274 2274 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2275 2275
2276 2276 @property
2277 2277 def name(self):
2278 2278 return self.group_name.split(RepoGroup.url_sep())[-1]
2279 2279
2280 2280 @property
2281 2281 def full_path(self):
2282 2282 return self.group_name
2283 2283
2284 2284 @property
2285 2285 def full_path_splitted(self):
2286 2286 return self.group_name.split(RepoGroup.url_sep())
2287 2287
2288 2288 @property
2289 2289 def repositories(self):
2290 2290 return Repository.query()\
2291 2291 .filter(Repository.group == self)\
2292 2292 .order_by(Repository.repo_name)
2293 2293
2294 2294 @property
2295 2295 def repositories_recursive_count(self):
2296 2296 cnt = self.repositories.count()
2297 2297
2298 2298 def children_count(group):
2299 2299 cnt = 0
2300 2300 for child in group.children:
2301 2301 cnt += child.repositories.count()
2302 2302 cnt += children_count(child)
2303 2303 return cnt
2304 2304
2305 2305 return cnt + children_count(self)
2306 2306
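# Illustrative sketch, not part of the model: for a group that directly
# holds 2 repositories and has one child group holding 3 more,
# repositories_recursive_count returns 5.
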
2307 2307 def _recursive_objects(self, include_repos=True):
2308 2308 all_ = []
2309 2309
2310 2310 def _get_members(root_gr):
2311 2311 if include_repos:
2312 2312 for r in root_gr.repositories:
2313 2313 all_.append(r)
2314 2314 childs = root_gr.children.all()
2315 2315 if childs:
2316 2316 for gr in childs:
2317 2317 all_.append(gr)
2318 2318 _get_members(gr)
2319 2319
2320 2320 _get_members(self)
2321 2321 return [self] + all_
2322 2322
2323 2323 def recursive_groups_and_repos(self):
2324 2324 """
2325 2325 Recursively returns all groups, with the repositories in those groups.
2326 2326 """
2327 2327 return self._recursive_objects()
2328 2328
2329 2329 def recursive_groups(self):
2330 2330 """
2331 2331 Returns all child groups of this group, including children of children.
2332 2332 """
2333 2333 return self._recursive_objects(include_repos=False)
2334 2334
2335 2335 def get_new_name(self, group_name):
2336 2336 """
2337 2337 Returns the new full group name based on the parent group and the new name.
2338 2338
2339 2339 :param group_name:
2340 2340 """
2341 2341 path_prefix = (self.parent_group.full_path_splitted if
2342 2342 self.parent_group else [])
2343 2343 return RepoGroup.url_sep().join(path_prefix + [group_name])
2344 2344
2345 2345 def permissions(self, with_admins=True, with_owner=True):
2346 2346 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2347 2347 q = q.options(joinedload(UserRepoGroupToPerm.group),
2348 2348 joinedload(UserRepoGroupToPerm.user),
2349 2349 joinedload(UserRepoGroupToPerm.permission),)
2350 2350
2351 2351 # get owners, admins and their permissions. We rewrite the sqlalchemy
2352 2352 # objects into plain AttributeDict objects because the sqlalchemy session
2353 2353 # holds a global reference, so changing one object would propagate to all
2354 2354 # others. Without this, if an admin is also the owner, setting admin_row
2355 2355 # on one row would change both objects.
2356 2356 perm_rows = []
2357 2357 for _usr in q.all():
2358 2358 usr = AttributeDict(_usr.user.get_dict())
2359 2359 usr.permission = _usr.permission.permission_name
2360 2360 perm_rows.append(usr)
2361 2361
2362 2362 # sort the perm rows with the 'default' user first, then by
2363 2363 # admin, write, read, none permission, and alphabetically within
2364 2364 # each permission group
2365 2365 perm_rows = sorted(perm_rows, key=display_sort)
2366 2366
2367 2367 _admin_perm = 'group.admin'
2368 2368 owner_row = []
2369 2369 if with_owner:
2370 2370 usr = AttributeDict(self.user.get_dict())
2371 2371 usr.owner_row = True
2372 2372 usr.permission = _admin_perm
2373 2373 owner_row.append(usr)
2374 2374
2375 2375 super_admin_rows = []
2376 2376 if with_admins:
2377 2377 for usr in User.get_all_super_admins():
2378 2378 # if this admin is also owner, don't double the record
2379 2379 if usr.user_id == owner_row[0].user_id:
2380 2380 owner_row[0].admin_row = True
2381 2381 else:
2382 2382 usr = AttributeDict(usr.get_dict())
2383 2383 usr.admin_row = True
2384 2384 usr.permission = _admin_perm
2385 2385 super_admin_rows.append(usr)
2386 2386
2387 2387 return super_admin_rows + owner_row + perm_rows
2388 2388
2389 2389 def permission_user_groups(self):
2390 2390 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2391 2391 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2392 2392 joinedload(UserGroupRepoGroupToPerm.users_group),
2393 2393 joinedload(UserGroupRepoGroupToPerm.permission),)
2394 2394
2395 2395 perm_rows = []
2396 2396 for _user_group in q.all():
2397 2397 usr = AttributeDict(_user_group.users_group.get_dict())
2398 2398 usr.permission = _user_group.permission.permission_name
2399 2399 perm_rows.append(usr)
2400 2400
2401 2401 return perm_rows
2402 2402
2403 2403 def get_api_data(self):
2404 2404 """
2405 2405 Common function for generating api data.
2407 2407 """
2408 2408 group = self
2409 2409 data = {
2410 2410 'group_id': group.group_id,
2411 2411 'group_name': group.group_name,
2412 2412 'group_description': group.group_description,
2413 2413 'parent_group': group.parent_group.group_name if group.parent_group else None,
2414 2414 'repositories': [x.repo_name for x in group.repositories],
2415 2415 'owner': group.user.username,
2416 2416 }
2417 2417 return data
2418 2418
2419 2419
2420 2420 class Permission(Base, BaseModel):
2421 2421 __tablename__ = 'permissions'
2422 2422 __table_args__ = (
2423 2423 Index('p_perm_name_idx', 'permission_name'),
2424 2424 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2425 2425 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2426 2426 )
2427 2427 PERMS = [
2428 2428 ('hg.admin', _('RhodeCode Super Administrator')),
2429 2429
2430 2430 ('repository.none', _('Repository no access')),
2431 2431 ('repository.read', _('Repository read access')),
2432 2432 ('repository.write', _('Repository write access')),
2433 2433 ('repository.admin', _('Repository admin access')),
2434 2434
2435 2435 ('group.none', _('Repository group no access')),
2436 2436 ('group.read', _('Repository group read access')),
2437 2437 ('group.write', _('Repository group write access')),
2438 2438 ('group.admin', _('Repository group admin access')),
2439 2439
2440 2440 ('usergroup.none', _('User group no access')),
2441 2441 ('usergroup.read', _('User group read access')),
2442 2442 ('usergroup.write', _('User group write access')),
2443 2443 ('usergroup.admin', _('User group admin access')),
2444 2444
2445 2445 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2446 2446 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2447 2447
2448 2448 ('hg.usergroup.create.false', _('User Group creation disabled')),
2449 2449 ('hg.usergroup.create.true', _('User Group creation enabled')),
2450 2450
2451 2451 ('hg.create.none', _('Repository creation disabled')),
2452 2452 ('hg.create.repository', _('Repository creation enabled')),
2453 2453 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2454 2454 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2455 2455
2456 2456 ('hg.fork.none', _('Repository forking disabled')),
2457 2457 ('hg.fork.repository', _('Repository forking enabled')),
2458 2458
2459 2459 ('hg.register.none', _('Registration disabled')),
2460 2460 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2461 2461 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2462 2462
2463 2463 ('hg.password_reset.enabled', _('Password reset enabled')),
2464 2464 ('hg.password_reset.hidden', _('Password reset hidden')),
2465 2465 ('hg.password_reset.disabled', _('Password reset disabled')),
2466 2466
2467 2467 ('hg.extern_activate.manual', _('Manual activation of external account')),
2468 2468 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2469 2469
2470 2470 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2471 2471 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2472 2472 ]
2473 2473
2474 2474 # definition of system default permissions for DEFAULT user
2475 2475 DEFAULT_USER_PERMISSIONS = [
2476 2476 'repository.read',
2477 2477 'group.read',
2478 2478 'usergroup.read',
2479 2479 'hg.create.repository',
2480 2480 'hg.repogroup.create.false',
2481 2481 'hg.usergroup.create.false',
2482 2482 'hg.create.write_on_repogroup.true',
2483 2483 'hg.fork.repository',
2484 2484 'hg.register.manual_activate',
2485 2485 'hg.password_reset.enabled',
2486 2486 'hg.extern_activate.auto',
2487 2487 'hg.inherit_default_perms.true',
2488 2488 ]
2489 2489
2490 2490 # Weight defines which permissions are more important;
2491 2491 # the higher the number, the more important the permission
2492 2492 # (see the illustrative sketch after this mapping).
2493 2493 PERM_WEIGHTS = {
2494 2494 'repository.none': 0,
2495 2495 'repository.read': 1,
2496 2496 'repository.write': 3,
2497 2497 'repository.admin': 4,
2498 2498
2499 2499 'group.none': 0,
2500 2500 'group.read': 1,
2501 2501 'group.write': 3,
2502 2502 'group.admin': 4,
2503 2503
2504 2504 'usergroup.none': 0,
2505 2505 'usergroup.read': 1,
2506 2506 'usergroup.write': 3,
2507 2507 'usergroup.admin': 4,
2508 2508
2509 2509 'hg.repogroup.create.false': 0,
2510 2510 'hg.repogroup.create.true': 1,
2511 2511
2512 2512 'hg.usergroup.create.false': 0,
2513 2513 'hg.usergroup.create.true': 1,
2514 2514
2515 2515 'hg.fork.none': 0,
2516 2516 'hg.fork.repository': 1,
2517 2517 'hg.create.none': 0,
2518 2518 'hg.create.repository': 1
2519 2519 }
2520 2520
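# Illustrative sketch, not part of the model: when several candidate
# permissions apply to the same object, the weights above can be used to
# pick the strongest one. The `candidate_perms` list here is hypothetical.
#
#   candidate_perms = ['repository.read', 'repository.write']
#   strongest = max(candidate_perms, key=Permission.PERM_WEIGHTS.get)
#   # strongest == 'repository.write'
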
2521 2521 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2522 2522 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2523 2523 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2524 2524
2525 2525 def __unicode__(self):
2526 2526 return u"<%s('%s:%s')>" % (
2527 2527 self.__class__.__name__, self.permission_id, self.permission_name
2528 2528 )
2529 2529
2530 2530 @classmethod
2531 2531 def get_by_key(cls, key):
2532 2532 return cls.query().filter(cls.permission_name == key).scalar()
2533 2533
2534 2534 @classmethod
2535 2535 def get_default_repo_perms(cls, user_id, repo_id=None):
2536 2536 q = Session().query(UserRepoToPerm, Repository, Permission)\
2537 2537 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2538 2538 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2539 2539 .filter(UserRepoToPerm.user_id == user_id)
2540 2540 if repo_id:
2541 2541 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2542 2542 return q.all()
2543 2543
2544 2544 @classmethod
2545 2545 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2546 2546 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2547 2547 .join(
2548 2548 Permission,
2549 2549 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2550 2550 .join(
2551 2551 Repository,
2552 2552 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2553 2553 .join(
2554 2554 UserGroup,
2555 2555 UserGroupRepoToPerm.users_group_id ==
2556 2556 UserGroup.users_group_id)\
2557 2557 .join(
2558 2558 UserGroupMember,
2559 2559 UserGroupRepoToPerm.users_group_id ==
2560 2560 UserGroupMember.users_group_id)\
2561 2561 .filter(
2562 2562 UserGroupMember.user_id == user_id,
2563 2563 UserGroup.users_group_active == true())
2564 2564 if repo_id:
2565 2565 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2566 2566 return q.all()
2567 2567
2568 2568 @classmethod
2569 2569 def get_default_group_perms(cls, user_id, repo_group_id=None):
2570 2570 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2571 2571 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2572 2572 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2573 2573 .filter(UserRepoGroupToPerm.user_id == user_id)
2574 2574 if repo_group_id:
2575 2575 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2576 2576 return q.all()
2577 2577
2578 2578 @classmethod
2579 2579 def get_default_group_perms_from_user_group(
2580 2580 cls, user_id, repo_group_id=None):
2581 2581 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2582 2582 .join(
2583 2583 Permission,
2584 2584 UserGroupRepoGroupToPerm.permission_id ==
2585 2585 Permission.permission_id)\
2586 2586 .join(
2587 2587 RepoGroup,
2588 2588 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2589 2589 .join(
2590 2590 UserGroup,
2591 2591 UserGroupRepoGroupToPerm.users_group_id ==
2592 2592 UserGroup.users_group_id)\
2593 2593 .join(
2594 2594 UserGroupMember,
2595 2595 UserGroupRepoGroupToPerm.users_group_id ==
2596 2596 UserGroupMember.users_group_id)\
2597 2597 .filter(
2598 2598 UserGroupMember.user_id == user_id,
2599 2599 UserGroup.users_group_active == true())
2600 2600 if repo_group_id:
2601 2601 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2602 2602 return q.all()
2603 2603
2604 2604 @classmethod
2605 2605 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2606 2606 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2607 2607 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2608 2608 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2609 2609 .filter(UserUserGroupToPerm.user_id == user_id)
2610 2610 if user_group_id:
2611 2611 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2612 2612 return q.all()
2613 2613
2614 2614 @classmethod
2615 2615 def get_default_user_group_perms_from_user_group(
2616 2616 cls, user_id, user_group_id=None):
2617 2617 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2618 2618 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2619 2619 .join(
2620 2620 Permission,
2621 2621 UserGroupUserGroupToPerm.permission_id ==
2622 2622 Permission.permission_id)\
2623 2623 .join(
2624 2624 TargetUserGroup,
2625 2625 UserGroupUserGroupToPerm.target_user_group_id ==
2626 2626 TargetUserGroup.users_group_id)\
2627 2627 .join(
2628 2628 UserGroup,
2629 2629 UserGroupUserGroupToPerm.user_group_id ==
2630 2630 UserGroup.users_group_id)\
2631 2631 .join(
2632 2632 UserGroupMember,
2633 2633 UserGroupUserGroupToPerm.user_group_id ==
2634 2634 UserGroupMember.users_group_id)\
2635 2635 .filter(
2636 2636 UserGroupMember.user_id == user_id,
2637 2637 UserGroup.users_group_active == true())
2638 2638 if user_group_id:
2639 2639 q = q.filter(
2640 2640 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2641 2641
2642 2642 return q.all()
2643 2643
2644 2644
2645 2645 class UserRepoToPerm(Base, BaseModel):
2646 2646 __tablename__ = 'repo_to_perm'
2647 2647 __table_args__ = (
2648 2648 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2649 2649 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2650 2650 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2651 2651 )
2652 2652 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2653 2653 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2654 2654 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2655 2655 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2656 2656
2657 2657 user = relationship('User')
2658 2658 repository = relationship('Repository')
2659 2659 permission = relationship('Permission')
2660 2660
2661 2661 @classmethod
2662 2662 def create(cls, user, repository, permission):
2663 2663 n = cls()
2664 2664 n.user = user
2665 2665 n.repository = repository
2666 2666 n.permission = permission
2667 2667 Session().add(n)
2668 2668 return n
2669 2669
2670 2670 def __unicode__(self):
2671 2671 return u'<%s => %s >' % (self.user, self.repository)
2672 2672
2673 2673
2674 2674 class UserUserGroupToPerm(Base, BaseModel):
2675 2675 __tablename__ = 'user_user_group_to_perm'
2676 2676 __table_args__ = (
2677 2677 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2678 2678 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2679 2679 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2680 2680 )
2681 2681 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2682 2682 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2683 2683 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2684 2684 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2685 2685
2686 2686 user = relationship('User')
2687 2687 user_group = relationship('UserGroup')
2688 2688 permission = relationship('Permission')
2689 2689
2690 2690 @classmethod
2691 2691 def create(cls, user, user_group, permission):
2692 2692 n = cls()
2693 2693 n.user = user
2694 2694 n.user_group = user_group
2695 2695 n.permission = permission
2696 2696 Session().add(n)
2697 2697 return n
2698 2698
2699 2699 def __unicode__(self):
2700 2700 return u'<%s => %s >' % (self.user, self.user_group)
2701 2701
2702 2702
2703 2703 class UserToPerm(Base, BaseModel):
2704 2704 __tablename__ = 'user_to_perm'
2705 2705 __table_args__ = (
2706 2706 UniqueConstraint('user_id', 'permission_id'),
2707 2707 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2708 2708 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2709 2709 )
2710 2710 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2711 2711 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2712 2712 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2713 2713
2714 2714 user = relationship('User')
2715 2715 permission = relationship('Permission', lazy='joined')
2716 2716
2717 2717 def __unicode__(self):
2718 2718 return u'<%s => %s >' % (self.user, self.permission)
2719 2719
2720 2720
2721 2721 class UserGroupRepoToPerm(Base, BaseModel):
2722 2722 __tablename__ = 'users_group_repo_to_perm'
2723 2723 __table_args__ = (
2724 2724 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2725 2725 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2726 2726 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2727 2727 )
2728 2728 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2729 2729 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2730 2730 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2731 2731 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2732 2732
2733 2733 users_group = relationship('UserGroup')
2734 2734 permission = relationship('Permission')
2735 2735 repository = relationship('Repository')
2736 2736
2737 2737 @classmethod
2738 2738 def create(cls, users_group, repository, permission):
2739 2739 n = cls()
2740 2740 n.users_group = users_group
2741 2741 n.repository = repository
2742 2742 n.permission = permission
2743 2743 Session().add(n)
2744 2744 return n
2745 2745
2746 2746 def __unicode__(self):
2747 2747 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2748 2748
2749 2749
2750 2750 class UserGroupUserGroupToPerm(Base, BaseModel):
2751 2751 __tablename__ = 'user_group_user_group_to_perm'
2752 2752 __table_args__ = (
2753 2753 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2754 2754 CheckConstraint('target_user_group_id != user_group_id'),
2755 2755 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2756 2756 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2757 2757 )
2758 2758 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2759 2759 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2760 2760 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2761 2761 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2762 2762
2763 2763 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2764 2764 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2765 2765 permission = relationship('Permission')
2766 2766
2767 2767 @classmethod
2768 2768 def create(cls, target_user_group, user_group, permission):
2769 2769 n = cls()
2770 2770 n.target_user_group = target_user_group
2771 2771 n.user_group = user_group
2772 2772 n.permission = permission
2773 2773 Session().add(n)
2774 2774 return n
2775 2775
2776 2776 def __unicode__(self):
2777 2777 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
2778 2778
2779 2779
2780 2780 class UserGroupToPerm(Base, BaseModel):
2781 2781 __tablename__ = 'users_group_to_perm'
2782 2782 __table_args__ = (
2783 2783 UniqueConstraint('users_group_id', 'permission_id',),
2784 2784 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2785 2785 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2786 2786 )
2787 2787 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2788 2788 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2789 2789 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2790 2790
2791 2791 users_group = relationship('UserGroup')
2792 2792 permission = relationship('Permission')
2793 2793
2794 2794
2795 2795 class UserRepoGroupToPerm(Base, BaseModel):
2796 2796 __tablename__ = 'user_repo_group_to_perm'
2797 2797 __table_args__ = (
2798 2798 UniqueConstraint('user_id', 'group_id', 'permission_id'),
2799 2799 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2800 2800 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2801 2801 )
2802 2802
2803 2803 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2804 2804 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2805 2805 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2806 2806 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2807 2807
2808 2808 user = relationship('User')
2809 2809 group = relationship('RepoGroup')
2810 2810 permission = relationship('Permission')
2811 2811
2812 2812 @classmethod
2813 2813 def create(cls, user, repository_group, permission):
2814 2814 n = cls()
2815 2815 n.user = user
2816 2816 n.group = repository_group
2817 2817 n.permission = permission
2818 2818 Session().add(n)
2819 2819 return n
2820 2820
2821 2821
2822 2822 class UserGroupRepoGroupToPerm(Base, BaseModel):
2823 2823 __tablename__ = 'users_group_repo_group_to_perm'
2824 2824 __table_args__ = (
2825 2825 UniqueConstraint('users_group_id', 'group_id'),
2826 2826 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2827 2827 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2828 2828 )
2829 2829
2830 2830 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2831 2831 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2832 2832 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2833 2833 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2834 2834
2835 2835 users_group = relationship('UserGroup')
2836 2836 permission = relationship('Permission')
2837 2837 group = relationship('RepoGroup')
2838 2838
2839 2839 @classmethod
2840 2840 def create(cls, user_group, repository_group, permission):
2841 2841 n = cls()
2842 2842 n.users_group = user_group
2843 2843 n.group = repository_group
2844 2844 n.permission = permission
2845 2845 Session().add(n)
2846 2846 return n
2847 2847
2848 2848 def __unicode__(self):
2849 2849 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
2850 2850
2851 2851
2852 2852 class Statistics(Base, BaseModel):
2853 2853 __tablename__ = 'statistics'
2854 2854 __table_args__ = (
2855 2855 UniqueConstraint('repository_id'),
2856 2856 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2857 2857 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2858 2858 )
2859 2859 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2860 2860 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
2861 2861 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
2862 2862 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
2863 2863 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
2864 2864 languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
2865 2865
2866 2866 repository = relationship('Repository', single_parent=True)
2867 2867
2868 2868
2869 2869 class UserFollowing(Base, BaseModel):
2870 2870 __tablename__ = 'user_followings'
2871 2871 __table_args__ = (
2872 2872 UniqueConstraint('user_id', 'follows_repository_id'),
2873 2873 UniqueConstraint('user_id', 'follows_user_id'),
2874 2874 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2875 2875 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2876 2876 )
2877 2877
2878 2878 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2879 2879 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2880 2880 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
2881 2881 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
2882 2882 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2883 2883
2884 2884 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
2885 2885
2886 2886 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
2887 2887 follows_repository = relationship('Repository', order_by='Repository.repo_name')
2888 2888
2889 2889 @classmethod
2890 2890 def get_repo_followers(cls, repo_id):
2891 2891 return cls.query().filter(cls.follows_repo_id == repo_id)
2892 2892
2893 2893
2894 2894 class CacheKey(Base, BaseModel):
2895 2895 __tablename__ = 'cache_invalidation'
2896 2896 __table_args__ = (
2897 2897 UniqueConstraint('cache_key'),
2898 2898 Index('key_idx', 'cache_key'),
2899 2899 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2900 2900 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2901 2901 )
2902 2902 CACHE_TYPE_ATOM = 'ATOM'
2903 2903 CACHE_TYPE_RSS = 'RSS'
2904 2904 CACHE_TYPE_README = 'README'
2905 2905
2906 2906 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2907 2907 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
2908 2908 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
2909 2909 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
2910 2910
2911 2911 def __init__(self, cache_key, cache_args=''):
2912 2912 self.cache_key = cache_key
2913 2913 self.cache_args = cache_args
2914 2914 self.cache_active = False
2915 2915
2916 2916 def __unicode__(self):
2917 2917 return u"<%s('%s:%s[%s]')>" % (
2918 2918 self.__class__.__name__,
2919 2919 self.cache_id, self.cache_key, self.cache_active)
2920 2920
2921 2921 def _cache_key_partition(self):
2922 2922 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
2923 2923 return prefix, repo_name, suffix
2924 2924
2925 2925 def get_prefix(self):
2926 2926 """
2927 2927 Try to extract prefix from existing cache key. The key could consist
2928 2928 of prefix, repo_name, suffix
2929 2929 """
2930 2930 # this returns prefix, repo_name, suffix
2931 2931 return self._cache_key_partition()[0]
2932 2932
2933 2933 def get_suffix(self):
2934 2934 """
2935 2935 get suffix that might have been used in _get_cache_key to
2936 2936 generate self.cache_key. Only used for informational purposes
2937 2937 in repo_edit.mako.
2938 2938 """
2939 2939 # prefix, repo_name, suffix
2940 2940 return self._cache_key_partition()[2]
2941 2941
2942 2942 @classmethod
2943 2943 def delete_all_cache(cls):
2944 2944 """
2945 2945 Delete all cache keys from database.
2946 2946 Should only be run when all instances are down and all entries
2947 2947 thus stale.
2948 2948 """
2949 2949 cls.query().delete()
2950 2950 Session().commit()
2951 2951
2952 2952 @classmethod
2953 2953 def get_cache_key(cls, repo_name, cache_type):
2954 2954 """
2956 2956 Generate a cache key for this process of a RhodeCode instance.
2957 2957 The prefix will most likely be the process id, or an explicitly set
2958 2958 instance_id from the .ini file.
2959 2959 """
2960 2960 import rhodecode
2961 2961 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
2962 2962
2963 2963 repo_as_unicode = safe_unicode(repo_name)
2964 2964 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
2965 2965 if cache_type else repo_as_unicode
2966 2966
2967 2967 return u'{}{}'.format(prefix, key)
2968 2968
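# Illustrative sketch, not part of the model: assuming instance_id is set
# to 'prod1' in the .ini file, the keys generated above would look like:
#
#   CacheKey.get_cache_key('some/repo', CacheKey.CACHE_TYPE_README)
#   # -> u'prod1some/repo_README'
#   CacheKey.get_cache_key('some/repo', None)
#   # -> u'prod1some/repo'
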
2969 2969 @classmethod
2970 2970 def set_invalidate(cls, repo_name, delete=False):
2971 2971 """
2972 2972 Mark all caches of a repo as invalid in the database.
2973 2973 """
2974 2974
2975 2975 try:
2976 2976 qry = Session().query(cls).filter(cls.cache_args == repo_name)
2977 2977 if delete:
2978 2978 log.debug('cache objects deleted for repo %s',
2979 2979 safe_str(repo_name))
2980 2980 qry.delete()
2981 2981 else:
2982 2982 log.debug('cache objects marked as invalid for repo %s',
2983 2983 safe_str(repo_name))
2984 2984 qry.update({"cache_active": False})
2985 2985
2986 2986 Session().commit()
2987 2987 except Exception:
2988 2988 log.exception(
2989 2989 'Cache key invalidation failed for repository %s',
2990 2990 safe_str(repo_name))
2991 2991 Session().rollback()
2992 2992
2993 2993 @classmethod
2994 2994 def get_active_cache(cls, cache_key):
2995 2995 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
2996 2996 if inv_obj:
2997 2997 return inv_obj
2998 2998 return None
2999 2999
3000 3000 @classmethod
3001 3001 def repo_context_cache(cls, compute_func, repo_name, cache_type,
3002 3002 thread_scoped=False):
3003 3003 """
3004 3004 @cache_region('long_term')
3005 3005 def _heavy_calculation(cache_key):
3006 3006 return 'result'
3007 3007
3008 3008 cache_context = CacheKey.repo_context_cache(
3009 3009 _heavy_calculation, repo_name, cache_type)
3010 3010
3011 3011 with cache_context as context:
3012 3012 context.invalidate()
3013 3013 computed = context.compute()
3014 3014
3015 3015 assert computed == 'result'
3016 3016 """
3017 3017 from rhodecode.lib import caches
3018 3018 return caches.InvalidationContext(
3019 3019 compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
3020 3020
3021 3021
3022 3022 class ChangesetComment(Base, BaseModel):
3023 3023 __tablename__ = 'changeset_comments'
3024 3024 __table_args__ = (
3025 3025 Index('cc_revision_idx', 'revision'),
3026 3026 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3027 3027 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3028 3028 )
3029 3029
3030 3030 COMMENT_OUTDATED = u'comment_outdated'
3031 3031 COMMENT_TYPE_NOTE = u'note'
3032 3032 COMMENT_TYPE_TODO = u'todo'
3033 3033 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3034 3034
3035 3035 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3036 3036 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3037 3037 revision = Column('revision', String(40), nullable=True)
3038 3038 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3039 3039 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3040 3040 line_no = Column('line_no', Unicode(10), nullable=True)
3041 3041 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3042 3042 f_path = Column('f_path', Unicode(1000), nullable=True)
3043 3043 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3044 3044 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3045 3045 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3046 3046 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3047 3047 renderer = Column('renderer', Unicode(64), nullable=True)
3048 3048 display_state = Column('display_state', Unicode(128), nullable=True)
3049 3049
3050 3050 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3051 3051 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3052 3052 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3053 3053 author = relationship('User', lazy='joined')
3054 3054 repo = relationship('Repository')
3055 3055 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3056 3056 pull_request = relationship('PullRequest', lazy='joined')
3057 3057 pull_request_version = relationship('PullRequestVersion')
3058 3058
3059 3059 @classmethod
3060 3060 def get_users(cls, revision=None, pull_request_id=None):
3061 3061 """
3062 3062 Returns users associated with this ChangesetComment, i.e. those
3063 3063 who actually commented.
3064 3064
3065 3065 :param cls:
3066 3066 :param revision:
3067 3067 """
3068 3068 q = Session().query(User)\
3069 3069 .join(ChangesetComment.author)
3070 3070 if revision:
3071 3071 q = q.filter(cls.revision == revision)
3072 3072 elif pull_request_id:
3073 3073 q = q.filter(cls.pull_request_id == pull_request_id)
3074 3074 return q.all()
3075 3075
3076 3076 @classmethod
3077 3077 def get_index_from_version(cls, pr_version, versions):
3078 3078 num_versions = [x.pull_request_version_id for x in versions]
3079 3079 try:
3080 3080 return num_versions.index(pr_version) + 1
3081 3081 except (IndexError, ValueError):
3082 3082 return
3083 3083
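# Illustrative sketch, not part of the model: with version objects whose
# pull_request_version_id values are [10, 11, 12], a comment attached to
# version 11 gets display index 2, i.e. the index is 1-based for display.
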
3084 3084 @property
3085 3085 def outdated(self):
3086 3086 return self.display_state == self.COMMENT_OUTDATED
3087 3087
3088 3088 def outdated_at_version(self, version):
3089 3089 """
3090 3090 Checks if comment is outdated for given pull request version
3091 3091 """
3092 3092 return self.outdated and self.pull_request_version_id != version
3093 3093
3094 3094 def older_than_version(self, version):
3095 3095 """
3096 3096 Checks if the comment was made in an older version than the given one.
3097 3097 """
3098 3098 if version is None:
3099 3099 return self.pull_request_version_id is not None
3100 3100
3101 3101 return self.pull_request_version_id < version
3102 3102
3103 3103 @property
3104 3104 def resolved(self):
3105 3105 return self.resolved_by[0] if self.resolved_by else None
3106 3106
3107 3107 @property
3108 3108 def is_todo(self):
3109 3109 return self.comment_type == self.COMMENT_TYPE_TODO
3110 3110
3111 3111 @property
3112 3112 def is_inline(self):
3113 3113 return self.line_no and self.f_path
3114 3114
3115 3115 def get_index_version(self, versions):
3116 3116 return self.get_index_from_version(
3117 3117 self.pull_request_version_id, versions)
3118 3118
3119 3119 def __repr__(self):
3120 3120 if self.comment_id:
3121 3121 return '<DB:Comment #%s>' % self.comment_id
3122 3122 else:
3123 3123 return '<DB:Comment at %#x>' % id(self)
3124 3124
3125 def get_api_data(self):
3126 comment = self
3127 data = {
3128 'comment_id': comment.comment_id,
3129 'comment_type': comment.comment_type,
3130 'comment_text': comment.text,
3131 'comment_status': comment.status_change,
3132 'comment_f_path': comment.f_path,
3133 'comment_lineno': comment.line_no,
3134 'comment_author': comment.author,
3135 'comment_created_on': comment.created_on
3136 }
3137 return data
3138
3139 def __json__(self):
3140 data = dict()
3141 data.update(self.get_api_data())
3142 return data
3143
3125 3144
3126 3145 class ChangesetStatus(Base, BaseModel):
3127 3146 __tablename__ = 'changeset_statuses'
3128 3147 __table_args__ = (
3129 3148 Index('cs_revision_idx', 'revision'),
3130 3149 Index('cs_version_idx', 'version'),
3131 3150 UniqueConstraint('repo_id', 'revision', 'version'),
3132 3151 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3133 3152 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3134 3153 )
3135 3154 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3136 3155 STATUS_APPROVED = 'approved'
3137 3156 STATUS_REJECTED = 'rejected'
3138 3157 STATUS_UNDER_REVIEW = 'under_review'
3139 3158
3140 3159 STATUSES = [
3141 3160 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3142 3161 (STATUS_APPROVED, _("Approved")),
3143 3162 (STATUS_REJECTED, _("Rejected")),
3144 3163 (STATUS_UNDER_REVIEW, _("Under Review")),
3145 3164 ]
3146 3165
3147 3166 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3148 3167 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3149 3168 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3150 3169 revision = Column('revision', String(40), nullable=False)
3151 3170 status = Column('status', String(128), nullable=False, default=DEFAULT)
3152 3171 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3153 3172 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3154 3173 version = Column('version', Integer(), nullable=False, default=0)
3155 3174 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3156 3175
3157 3176 author = relationship('User', lazy='joined')
3158 3177 repo = relationship('Repository')
3159 3178 comment = relationship('ChangesetComment', lazy='joined')
3160 3179 pull_request = relationship('PullRequest', lazy='joined')
3161 3180
3162 3181 def __unicode__(self):
3163 3182 return u"<%s('%s[v%s]:%s')>" % (
3164 3183 self.__class__.__name__,
3165 3184 self.status, self.version, self.author
3166 3185 )
3167 3186
3168 3187 @classmethod
3169 3188 def get_status_lbl(cls, value):
3170 3189 return dict(cls.STATUSES).get(value)
3171 3190
3172 3191 @property
3173 3192 def status_lbl(self):
3174 3193 return ChangesetStatus.get_status_lbl(self.status)
3175 3194
3195 def get_api_data(self):
3196 status = self
3197 data = {
3198 'status_id': status.changeset_status_id,
3199 'status': status.status,
3200 }
3201 return data
3202
3203 def __json__(self):
3204 data = dict()
3205 data.update(self.get_api_data())
3206 return data
3207
3176 3208
3177 3209 class _PullRequestBase(BaseModel):
3178 3210 """
3179 3211 Common attributes of pull request and version entries.
3180 3212 """
3181 3213
3182 3214 # .status values
3183 3215 STATUS_NEW = u'new'
3184 3216 STATUS_OPEN = u'open'
3185 3217 STATUS_CLOSED = u'closed'
3186 3218
3187 3219 title = Column('title', Unicode(255), nullable=True)
3188 3220 description = Column(
3189 3221 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3190 3222 nullable=True)
3191 3223 # new/open/closed status of pull request (not approve/reject/etc)
3192 3224 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3193 3225 created_on = Column(
3194 3226 'created_on', DateTime(timezone=False), nullable=False,
3195 3227 default=datetime.datetime.now)
3196 3228 updated_on = Column(
3197 3229 'updated_on', DateTime(timezone=False), nullable=False,
3198 3230 default=datetime.datetime.now)
3199 3231
3200 3232 @declared_attr
3201 3233 def user_id(cls):
3202 3234 return Column(
3203 3235 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3204 3236 unique=None)
3205 3237
3206 3238 # 500 revisions max
3207 3239 _revisions = Column(
3208 3240 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3209 3241
3210 3242 @declared_attr
3211 3243 def source_repo_id(cls):
3212 3244 # TODO: dan: rename column to source_repo_id
3213 3245 return Column(
3214 3246 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3215 3247 nullable=False)
3216 3248
3217 3249 source_ref = Column('org_ref', Unicode(255), nullable=False)
3218 3250
3219 3251 @declared_attr
3220 3252 def target_repo_id(cls):
3221 3253 # TODO: dan: rename column to target_repo_id
3222 3254 return Column(
3223 3255 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3224 3256 nullable=False)
3225 3257
3226 3258 target_ref = Column('other_ref', Unicode(255), nullable=False)
3227 3259 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3228 3260
3229 3261 # TODO: dan: rename column to last_merge_source_rev
3230 3262 _last_merge_source_rev = Column(
3231 3263 'last_merge_org_rev', String(40), nullable=True)
3232 3264 # TODO: dan: rename column to last_merge_target_rev
3233 3265 _last_merge_target_rev = Column(
3234 3266 'last_merge_other_rev', String(40), nullable=True)
3235 3267 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3236 3268 merge_rev = Column('merge_rev', String(40), nullable=True)
3237 3269
3238 3270 reviewer_data = Column(
3239 3271 'reviewer_data_json', MutationObj.as_mutable(
3240 3272 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3241 3273
3242 3274 @property
3243 3275 def reviewer_data_json(self):
3244 3276 return json.dumps(self.reviewer_data)
3245 3277
3246 3278 @hybrid_property
3247 3279 def revisions(self):
3248 3280 return self._revisions.split(':') if self._revisions else []
3249 3281
3250 3282 @revisions.setter
3251 3283 def revisions(self, val):
3252 3284 self._revisions = ':'.join(val)
3253 3285
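# Illustrative sketch, not part of the model: revisions are persisted as a
# single colon-joined string, so setting
#
#   pull_request.revisions = ['abc123', 'def456']
#
# stores u'abc123:def456' in the underlying column, and reading
# pull_request.revisions back returns ['abc123', 'def456'].
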
3254 3286 @declared_attr
3255 3287 def author(cls):
3256 3288 return relationship('User', lazy='joined')
3257 3289
3258 3290 @declared_attr
3259 3291 def source_repo(cls):
3260 3292 return relationship(
3261 3293 'Repository',
3262 3294 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3263 3295
3264 3296 @property
3265 3297 def source_ref_parts(self):
3266 3298 return self.unicode_to_reference(self.source_ref)
3267 3299
3268 3300 @declared_attr
3269 3301 def target_repo(cls):
3270 3302 return relationship(
3271 3303 'Repository',
3272 3304 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3273 3305
3274 3306 @property
3275 3307 def target_ref_parts(self):
3276 3308 return self.unicode_to_reference(self.target_ref)
3277 3309
3278 3310 @property
3279 3311 def shadow_merge_ref(self):
3280 3312 return self.unicode_to_reference(self._shadow_merge_ref)
3281 3313
3282 3314 @shadow_merge_ref.setter
3283 3315 def shadow_merge_ref(self, ref):
3284 3316 self._shadow_merge_ref = self.reference_to_unicode(ref)
3285 3317
3286 3318 def unicode_to_reference(self, raw):
3287 3319 """
3288 3320 Convert a unicode (or string) to a reference object.
3289 3321 If unicode evaluates to False it returns None.
3290 3322 """
3291 3323 if raw:
3292 3324 refs = raw.split(':')
3293 3325 return Reference(*refs)
3294 3326 else:
3295 3327 return None
3296 3328
3297 3329 def reference_to_unicode(self, ref):
3298 3330 """
3299 3331 Convert a reference object to unicode.
3300 3332 If reference is None it returns None.
3301 3333 """
3302 3334 if ref:
3303 3335 return u':'.join(ref)
3304 3336 else:
3305 3337 return None
3306 3338
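# Illustrative sketch, not part of the model: the two helpers above
# round-trip a ref string through the Reference tuple, e.g. (field names
# assumed from the Reference definition elsewhere in this module):
#
#   ref = pull_request.unicode_to_reference(u'branch:default:deadbeefcafe')
#   # -> Reference(type=u'branch', name=u'default', commit_id=u'deadbeefcafe')
#   pull_request.reference_to_unicode(ref)
#   # -> u'branch:default:deadbeefcafe'
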
3307 def get_api_data(self):
3308 from pylons import url
3339 def get_api_data(self, with_merge_state=True):
3309 3340 from rhodecode.model.pull_request import PullRequestModel
3341
3310 3342 pull_request = self
3311 merge_status = PullRequestModel().merge_status(pull_request)
3312
3313 pull_request_url = url(
3314 'pullrequest_show', repo_name=self.target_repo.repo_name,
3315 pull_request_id=self.pull_request_id, qualified=True)
3343 if with_merge_state:
3344 merge_status = PullRequestModel().merge_status(pull_request)
3345 merge_state = {
3346 'status': merge_status[0],
3347 'message': safe_unicode(merge_status[1]),
3348 }
3349 else:
3350 merge_state = {'status': 'not_available',
3351 'message': 'not_available'}
3316 3352
3317 3353 merge_data = {
3318 3354 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3319 3355 'reference': (
3320 3356 pull_request.shadow_merge_ref._asdict()
3321 3357 if pull_request.shadow_merge_ref else None),
3322 3358 }
3323 3359
3324 3360 data = {
3325 3361 'pull_request_id': pull_request.pull_request_id,
3326 'url': pull_request_url,
3362 'url': PullRequestModel().get_url(pull_request),
3327 3363 'title': pull_request.title,
3328 3364 'description': pull_request.description,
3329 3365 'status': pull_request.status,
3330 3366 'created_on': pull_request.created_on,
3331 3367 'updated_on': pull_request.updated_on,
3332 3368 'commit_ids': pull_request.revisions,
3333 3369 'review_status': pull_request.calculated_review_status(),
3334 'mergeable': {
3335 'status': merge_status[0],
3336 'message': unicode(merge_status[1]),
3337 },
3370 'mergeable': merge_state,
3338 3371 'source': {
3339 3372 'clone_url': pull_request.source_repo.clone_url(),
3340 3373 'repository': pull_request.source_repo.repo_name,
3341 3374 'reference': {
3342 3375 'name': pull_request.source_ref_parts.name,
3343 3376 'type': pull_request.source_ref_parts.type,
3344 3377 'commit_id': pull_request.source_ref_parts.commit_id,
3345 3378 },
3346 3379 },
3347 3380 'target': {
3348 3381 'clone_url': pull_request.target_repo.clone_url(),
3349 3382 'repository': pull_request.target_repo.repo_name,
3350 3383 'reference': {
3351 3384 'name': pull_request.target_ref_parts.name,
3352 3385 'type': pull_request.target_ref_parts.type,
3353 3386 'commit_id': pull_request.target_ref_parts.commit_id,
3354 3387 },
3355 3388 },
3356 3389 'merge': merge_data,
3357 3390 'author': pull_request.author.get_api_data(include_secrets=False,
3358 3391 details='basic'),
3359 3392 'reviewers': [
3360 3393 {
3361 3394 'user': reviewer.get_api_data(include_secrets=False,
3362 3395 details='basic'),
3363 3396 'reasons': reasons,
3364 3397 'review_status': st[0][1].status if st else 'not_reviewed',
3365 3398 }
3366 3399 for reviewer, reasons, mandatory, st in
3367 3400 pull_request.reviewers_statuses()
3368 3401 ]
3369 3402 }
3370 3403
3371 3404 return data
3372 3405
3373 3406
3374 3407 class PullRequest(Base, _PullRequestBase):
3375 3408 __tablename__ = 'pull_requests'
3376 3409 __table_args__ = (
3377 3410 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3378 3411 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3379 3412 )
3380 3413
3381 3414 pull_request_id = Column(
3382 3415 'pull_request_id', Integer(), nullable=False, primary_key=True)
3383 3416
3384 3417 def __repr__(self):
3385 3418 if self.pull_request_id:
3386 3419 return '<DB:PullRequest #%s>' % self.pull_request_id
3387 3420 else:
3388 3421 return '<DB:PullRequest at %#x>' % id(self)
3389 3422
3390 3423 reviewers = relationship('PullRequestReviewers',
3391 3424 cascade="all, delete, delete-orphan")
3392 statuses = relationship('ChangesetStatus')
3425 statuses = relationship('ChangesetStatus',
3426 cascade="all, delete, delete-orphan")
3393 3427 comments = relationship('ChangesetComment',
3394 3428 cascade="all, delete, delete-orphan")
3395 3429 versions = relationship('PullRequestVersion',
3396 3430 cascade="all, delete, delete-orphan",
3397 3431 lazy='dynamic')
3398 3432
3399 3433 @classmethod
3400 3434 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3401 3435 internal_methods=None):
3402 3436
3403 3437 class PullRequestDisplay(object):
3404 3438 """
3405 3439 Special object wrapper for showing PullRequest data via Versions.
3406 3440 It mimics the PR object as closely as possible. This is a read-only
3407 3441 object intended for display only.
3408 3442 """
3409 3443
3410 3444 def __init__(self, attrs, internal=None):
3411 3445 self.attrs = attrs
3412 3446 # internal attributes take priority over the ones given via attrs
3413 3447 self.internal = internal or ['versions']
3414 3448
3415 3449 def __getattr__(self, item):
3416 3450 if item in self.internal:
3417 3451 return getattr(self, item)
3418 3452 try:
3419 3453 return self.attrs[item]
3420 3454 except KeyError:
3421 3455 raise AttributeError(
3422 3456 '%s object has no attribute %s' % (self, item))
3423 3457
3424 3458 def __repr__(self):
3425 3459 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3426 3460
3427 3461 def versions(self):
3428 3462 return pull_request_obj.versions.order_by(
3429 3463 PullRequestVersion.pull_request_version_id).all()
3430 3464
3431 3465 def is_closed(self):
3432 3466 return pull_request_obj.is_closed()
3433 3467
3434 3468 @property
3435 3469 def pull_request_version_id(self):
3436 3470 return getattr(pull_request_obj, 'pull_request_version_id', None)
3437 3471
3438 3472 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3439 3473
3440 3474 attrs.author = StrictAttributeDict(
3441 3475 pull_request_obj.author.get_api_data())
3442 3476 if pull_request_obj.target_repo:
3443 3477 attrs.target_repo = StrictAttributeDict(
3444 3478 pull_request_obj.target_repo.get_api_data())
3445 3479 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3446 3480
3447 3481 if pull_request_obj.source_repo:
3448 3482 attrs.source_repo = StrictAttributeDict(
3449 3483 pull_request_obj.source_repo.get_api_data())
3450 3484 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3451 3485
3452 3486 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3453 3487 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3454 3488 attrs.revisions = pull_request_obj.revisions
3455 3489
3456 3490 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3457 3491 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3458 3492 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3459 3493
3460 3494 return PullRequestDisplay(attrs, internal=internal_methods)
3461 3495
3462 3496 def is_closed(self):
3463 3497 return self.status == self.STATUS_CLOSED
3464 3498
3465 3499 def __json__(self):
3466 3500 return {
3467 3501 'revisions': self.revisions,
3468 3502 }
3469 3503
3470 3504 def calculated_review_status(self):
3471 3505 from rhodecode.model.changeset_status import ChangesetStatusModel
3472 3506 return ChangesetStatusModel().calculated_review_status(self)
3473 3507
3474 3508 def reviewers_statuses(self):
3475 3509 from rhodecode.model.changeset_status import ChangesetStatusModel
3476 3510 return ChangesetStatusModel().reviewers_statuses(self)
3477 3511
3478 3512 @property
3479 3513 def workspace_id(self):
3480 3514 from rhodecode.model.pull_request import PullRequestModel
3481 3515 return PullRequestModel()._workspace_id(self)
3482 3516
3483 3517 def get_shadow_repo(self):
3484 3518 workspace_id = self.workspace_id
3485 3519 vcs_obj = self.target_repo.scm_instance()
3486 3520 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3487 3521 workspace_id)
3488 3522 return vcs_obj._get_shadow_instance(shadow_repository_path)
3489 3523
3490 3524
3491 3525 class PullRequestVersion(Base, _PullRequestBase):
3492 3526 __tablename__ = 'pull_request_versions'
3493 3527 __table_args__ = (
3494 3528 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3495 3529 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3496 3530 )
3497 3531
3498 3532 pull_request_version_id = Column(
3499 3533 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3500 3534 pull_request_id = Column(
3501 3535 'pull_request_id', Integer(),
3502 3536 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3503 3537 pull_request = relationship('PullRequest')
3504 3538
3505 3539 def __repr__(self):
3506 3540 if self.pull_request_version_id:
3507 3541 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3508 3542 else:
3509 3543 return '<DB:PullRequestVersion at %#x>' % id(self)
3510 3544
3511 3545 @property
3512 3546 def reviewers(self):
3513 3547 return self.pull_request.reviewers
3514 3548
3515 3549 @property
3516 3550 def versions(self):
3517 3551 return self.pull_request.versions
3518 3552
3519 3553 def is_closed(self):
3520 3554 # calculate from original
3521 3555 return self.pull_request.status == self.STATUS_CLOSED
3522 3556
3523 3557 def calculated_review_status(self):
3524 3558 return self.pull_request.calculated_review_status()
3525 3559
3526 3560 def reviewers_statuses(self):
3527 3561 return self.pull_request.reviewers_statuses()
3528 3562
3529 3563
3530 3564 class PullRequestReviewers(Base, BaseModel):
3531 3565 __tablename__ = 'pull_request_reviewers'
3532 3566 __table_args__ = (
3533 3567 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3534 3568 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3535 3569 )
3536 3570
3537 3571 @hybrid_property
3538 3572 def reasons(self):
3539 3573 if not self._reasons:
3540 3574 return []
3541 3575 return self._reasons
3542 3576
3543 3577 @reasons.setter
3544 3578 def reasons(self, val):
3545 3579 val = val or []
3546 3580 if any(not isinstance(x, basestring) for x in val):
3547 3581 raise Exception('invalid reasons type, must be list of strings')
3548 3582 self._reasons = val
3549 3583
3550 3584 pull_requests_reviewers_id = Column(
3551 3585 'pull_requests_reviewers_id', Integer(), nullable=False,
3552 3586 primary_key=True)
3553 3587 pull_request_id = Column(
3554 3588 "pull_request_id", Integer(),
3555 3589 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3556 3590 user_id = Column(
3557 3591 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3558 3592 _reasons = Column(
3559 3593 'reason', MutationList.as_mutable(
3560 3594 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3561 3595 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3562 3596 user = relationship('User')
3563 3597 pull_request = relationship('PullRequest')
3564 3598
3565 3599
3566 3600 class Notification(Base, BaseModel):
3567 3601 __tablename__ = 'notifications'
3568 3602 __table_args__ = (
3569 3603 Index('notification_type_idx', 'type'),
3570 3604 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3571 3605 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3572 3606 )
3573 3607
3574 3608 TYPE_CHANGESET_COMMENT = u'cs_comment'
3575 3609 TYPE_MESSAGE = u'message'
3576 3610 TYPE_MENTION = u'mention'
3577 3611 TYPE_REGISTRATION = u'registration'
3578 3612 TYPE_PULL_REQUEST = u'pull_request'
3579 3613 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3580 3614
3581 3615 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3582 3616 subject = Column('subject', Unicode(512), nullable=True)
3583 3617 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3584 3618 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3585 3619 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3586 3620 type_ = Column('type', Unicode(255))
3587 3621
3588 3622 created_by_user = relationship('User')
3589 3623 notifications_to_users = relationship('UserNotification', lazy='joined',
3590 3624 cascade="all, delete, delete-orphan")
3591 3625
3592 3626 @property
3593 3627 def recipients(self):
3594 3628 return [x.user for x in UserNotification.query()\
3595 3629 .filter(UserNotification.notification == self)\
3596 3630 .order_by(UserNotification.user_id.asc()).all()]
3597 3631
3598 3632 @classmethod
3599 3633 def create(cls, created_by, subject, body, recipients, type_=None):
3600 3634 if type_ is None:
3601 3635 type_ = Notification.TYPE_MESSAGE
3602 3636
3603 3637 notification = cls()
3604 3638 notification.created_by_user = created_by
3605 3639 notification.subject = subject
3606 3640 notification.body = body
3607 3641 notification.type_ = type_
3608 3642 notification.created_on = datetime.datetime.now()
3609 3643
3610 3644 for u in recipients:
3611 3645 assoc = UserNotification()
3612 3646 assoc.notification = notification
3613 3647
3614 3648 # if created_by is inside recipients mark his notification
3615 3649 # as read
3616 3650 if u.user_id == created_by.user_id:
3617 3651 assoc.read = True
3618 3652
3619 3653 u.notifications.append(assoc)
3620 3654 Session().add(notification)
3621 3655
3622 3656 return notification
3623 3657
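    # A minimal usage sketch of the factory above (object names are
    # illustrative assumptions, not part of this file): the creator's own
    # association is stored pre-marked as read, every other recipient gets
    # an unread one.
    #
    #     notification = Notification.create(
    #         created_by=admin_user, subject=u'New comment',
    #         body=u'...', recipients=[admin_user, reviewer_user],
    #         type_=Notification.TYPE_PULL_REQUEST_COMMENT)
    #     Session().commit()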
3624 3658 @property
3625 3659 def description(self):
3626 3660 from rhodecode.model.notification import NotificationModel
3627 3661 return NotificationModel().make_description(self)
3628 3662
3629 3663
3630 3664 class UserNotification(Base, BaseModel):
3631 3665 __tablename__ = 'user_to_notification'
3632 3666 __table_args__ = (
3633 3667 UniqueConstraint('user_id', 'notification_id'),
3634 3668 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3635 3669 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3636 3670 )
3637 3671 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3638 3672 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3639 3673 read = Column('read', Boolean, default=False)
3640 3674 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3641 3675
3642 3676 user = relationship('User', lazy="joined")
3643 3677 notification = relationship('Notification', lazy="joined",
3644 3678 order_by=lambda: Notification.created_on.desc(),)
3645 3679
3646 3680 def mark_as_read(self):
3647 3681 self.read = True
3648 3682 Session().add(self)
3649 3683
3650 3684
3651 3685 class Gist(Base, BaseModel):
3652 3686 __tablename__ = 'gists'
3653 3687 __table_args__ = (
3654 3688 Index('g_gist_access_id_idx', 'gist_access_id'),
3655 3689 Index('g_created_on_idx', 'created_on'),
3656 3690 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3657 3691 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3658 3692 )
3659 3693 GIST_PUBLIC = u'public'
3660 3694 GIST_PRIVATE = u'private'
3661 3695 DEFAULT_FILENAME = u'gistfile1.txt'
3662 3696
3663 3697 ACL_LEVEL_PUBLIC = u'acl_public'
3664 3698 ACL_LEVEL_PRIVATE = u'acl_private'
3665 3699
3666 3700 gist_id = Column('gist_id', Integer(), primary_key=True)
3667 3701 gist_access_id = Column('gist_access_id', Unicode(250))
3668 3702 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3669 3703 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3670 3704 gist_expires = Column('gist_expires', Float(53), nullable=False)
3671 3705 gist_type = Column('gist_type', Unicode(128), nullable=False)
3672 3706 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3673 3707 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3674 3708 acl_level = Column('acl_level', Unicode(128), nullable=True)
3675 3709
3676 3710 owner = relationship('User')
3677 3711
3678 3712 def __repr__(self):
3679 3713 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3680 3714
3681 3715 @classmethod
3682 3716 def get_or_404(cls, id_, pyramid_exc=False):
3683 3717
3684 3718 if pyramid_exc:
3685 3719 from pyramid.httpexceptions import HTTPNotFound
3686 3720 else:
3687 3721 from webob.exc import HTTPNotFound
3688 3722
3689 3723 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3690 3724 if not res:
3691 3725 raise HTTPNotFound
3692 3726 return res
3693 3727
3694 3728 @classmethod
3695 3729 def get_by_access_id(cls, gist_access_id):
3696 3730 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3697 3731
3698 3732 def gist_url(self):
3699 3733 import rhodecode
3700 3734 from pylons import url
3701 3735
3702 3736 alias_url = rhodecode.CONFIG.get('gist_alias_url')
3703 3737 if alias_url:
3704 3738 return alias_url.replace('{gistid}', self.gist_access_id)
3705 3739
3706 3740 return url('gist', gist_id=self.gist_access_id, qualified=True)
3707 3741
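    # A short sketch of the alias handling above; the URL below is an assumed
    # example value, not a shipped default:
    #
    #     rhodecode.CONFIG['gist_alias_url'] = 'https://gist.example.com/{gistid}'
    #     gist.gist_access_id = u'abcd1234'
    #     gist.gist_url()   # -> 'https://gist.example.com/abcd1234'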
3708 3742 @classmethod
3709 3743 def base_path(cls):
3710 3744 """
3711 3745         Returns the base path where all gists are stored
3712 3746
3713 3747 :param cls:
3714 3748 """
3715 3749 from rhodecode.model.gist import GIST_STORE_LOC
3716 3750 q = Session().query(RhodeCodeUi)\
3717 3751 .filter(RhodeCodeUi.ui_key == URL_SEP)
3718 3752 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3719 3753 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3720 3754
3721 3755 def get_api_data(self):
3722 3756 """
3723 3757 Common function for generating gist related data for API
3724 3758 """
3725 3759 gist = self
3726 3760 data = {
3727 3761 'gist_id': gist.gist_id,
3728 3762 'type': gist.gist_type,
3729 3763 'access_id': gist.gist_access_id,
3730 3764 'description': gist.gist_description,
3731 3765 'url': gist.gist_url(),
3732 3766 'expires': gist.gist_expires,
3733 3767 'created_on': gist.created_on,
3734 3768 'modified_at': gist.modified_at,
3735 3769 'content': None,
3736 3770 'acl_level': gist.acl_level,
3737 3771 }
3738 3772 return data
3739 3773
3740 3774 def __json__(self):
3741 3775 data = dict(
3742 3776 )
3743 3777 data.update(self.get_api_data())
3744 3778 return data
3745 3779 # SCM functions
3746 3780
3747 3781 def scm_instance(self, **kwargs):
3748 3782 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
3749 3783 return get_vcs_instance(
3750 3784 repo_path=safe_str(full_repo_path), create=False)
3751 3785
3752 3786
3753 3787 class ExternalIdentity(Base, BaseModel):
3754 3788 __tablename__ = 'external_identities'
3755 3789 __table_args__ = (
3756 3790 Index('local_user_id_idx', 'local_user_id'),
3757 3791 Index('external_id_idx', 'external_id'),
3758 3792 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3759 3793 'mysql_charset': 'utf8'})
3760 3794
3761 3795 external_id = Column('external_id', Unicode(255), default=u'',
3762 3796 primary_key=True)
3763 3797 external_username = Column('external_username', Unicode(1024), default=u'')
3764 3798 local_user_id = Column('local_user_id', Integer(),
3765 3799 ForeignKey('users.user_id'), primary_key=True)
3766 3800 provider_name = Column('provider_name', Unicode(255), default=u'',
3767 3801 primary_key=True)
3768 3802 access_token = Column('access_token', String(1024), default=u'')
3769 3803 alt_token = Column('alt_token', String(1024), default=u'')
3770 3804 token_secret = Column('token_secret', String(1024), default=u'')
3771 3805
3772 3806 @classmethod
3773 3807 def by_external_id_and_provider(cls, external_id, provider_name,
3774 3808 local_user_id=None):
3775 3809 """
3776 3810 Returns ExternalIdentity instance based on search params
3777 3811
3778 3812 :param external_id:
3779 3813 :param provider_name:
3780 3814 :return: ExternalIdentity
3781 3815 """
3782 3816 query = cls.query()
3783 3817 query = query.filter(cls.external_id == external_id)
3784 3818 query = query.filter(cls.provider_name == provider_name)
3785 3819 if local_user_id:
3786 3820 query = query.filter(cls.local_user_id == local_user_id)
3787 3821 return query.first()
3788 3822
3789 3823 @classmethod
3790 3824 def user_by_external_id_and_provider(cls, external_id, provider_name):
3791 3825 """
3792 3826 Returns User instance based on search params
3793 3827
3794 3828 :param external_id:
3795 3829 :param provider_name:
3796 3830 :return: User
3797 3831 """
3798 3832 query = User.query()
3799 3833 query = query.filter(cls.external_id == external_id)
3800 3834 query = query.filter(cls.provider_name == provider_name)
3801 3835 query = query.filter(User.user_id == cls.local_user_id)
3802 3836 return query.first()
3803 3837
3804 3838 @classmethod
3805 3839 def by_local_user_id(cls, local_user_id):
3806 3840 """
3807 3841 Returns all tokens for user
3808 3842
3809 3843 :param local_user_id:
3810 3844 :return: ExternalIdentity
3811 3845 """
3812 3846 query = cls.query()
3813 3847 query = query.filter(cls.local_user_id == local_user_id)
3814 3848 return query
3815 3849
3816 3850
3817 3851 class Integration(Base, BaseModel):
3818 3852 __tablename__ = 'integrations'
3819 3853 __table_args__ = (
3820 3854 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3821 3855 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3822 3856 )
3823 3857
3824 3858 integration_id = Column('integration_id', Integer(), primary_key=True)
3825 3859 integration_type = Column('integration_type', String(255))
3826 3860 enabled = Column('enabled', Boolean(), nullable=False)
3827 3861 name = Column('name', String(255), nullable=False)
3828 3862 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
3829 3863 default=False)
3830 3864
3831 3865 settings = Column(
3832 3866 'settings_json', MutationObj.as_mutable(
3833 3867 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3834 3868 repo_id = Column(
3835 3869 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
3836 3870 nullable=True, unique=None, default=None)
3837 3871 repo = relationship('Repository', lazy='joined')
3838 3872
3839 3873 repo_group_id = Column(
3840 3874 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
3841 3875 nullable=True, unique=None, default=None)
3842 3876 repo_group = relationship('RepoGroup', lazy='joined')
3843 3877
3844 3878 @property
3845 3879 def scope(self):
3846 3880 if self.repo:
3847 3881 return repr(self.repo)
3848 3882 if self.repo_group:
3849 3883 if self.child_repos_only:
3850 3884 return repr(self.repo_group) + ' (child repos only)'
3851 3885 else:
3852 3886 return repr(self.repo_group) + ' (recursive)'
3853 3887 if self.child_repos_only:
3854 3888 return 'root_repos'
3855 3889 return 'global'
3856 3890
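    # The property above resolves to one of four scopes; a compact summary:
    #
    #     repo set                        -> repr() of that repository
    #     repo_group + child_repos_only   -> repr() of the group + ' (child repos only)'
    #     repo_group without the flag     -> repr() of the group + ' (recursive)'
    #     neither + child_repos_only      -> 'root_repos', otherwise 'global'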
3857 3891 def __repr__(self):
3858 3892 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
3859 3893
3860 3894
3861 3895 class RepoReviewRuleUser(Base, BaseModel):
3862 3896 __tablename__ = 'repo_review_rules_users'
3863 3897 __table_args__ = (
3864 3898 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3865 3899 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3866 3900 )
3867 3901 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
3868 3902 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
3869 3903 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
3870 3904 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3871 3905 user = relationship('User')
3872 3906
3873 3907 def rule_data(self):
3874 3908 return {
3875 3909 'mandatory': self.mandatory
3876 3910 }
3877 3911
3878 3912
3879 3913 class RepoReviewRuleUserGroup(Base, BaseModel):
3880 3914 __tablename__ = 'repo_review_rules_users_groups'
3881 3915 __table_args__ = (
3882 3916 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3883 3917 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3884 3918 )
3885 3919 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
3886 3920 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
3887 3921 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
3888 3922 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3889 3923 users_group = relationship('UserGroup')
3890 3924
3891 3925 def rule_data(self):
3892 3926 return {
3893 3927 'mandatory': self.mandatory
3894 3928 }
3895 3929
3896 3930
3897 3931 class RepoReviewRule(Base, BaseModel):
3898 3932 __tablename__ = 'repo_review_rules'
3899 3933 __table_args__ = (
3900 3934 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3901 3935 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3902 3936 )
3903 3937
3904 3938 repo_review_rule_id = Column(
3905 3939 'repo_review_rule_id', Integer(), primary_key=True)
3906 3940 repo_id = Column(
3907 3941 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
3908 3942 repo = relationship('Repository', backref='review_rules')
3909 3943
3910 3944 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
3911 3945 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
3912 3946
3913 3947 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
3914 3948 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
3915 3949 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
3916 3950 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
3917 3951
3918 3952 rule_users = relationship('RepoReviewRuleUser')
3919 3953 rule_user_groups = relationship('RepoReviewRuleUserGroup')
3920 3954
3921 3955 @hybrid_property
3922 3956 def branch_pattern(self):
3923 3957 return self._branch_pattern or '*'
3924 3958
3925 3959 def _validate_glob(self, value):
3926 3960 re.compile('^' + glob2re(value) + '$')
3927 3961
3928 3962 @branch_pattern.setter
3929 3963 def branch_pattern(self, value):
3930 3964 self._validate_glob(value)
3931 3965 self._branch_pattern = value or '*'
3932 3966
3933 3967 @hybrid_property
3934 3968 def file_pattern(self):
3935 3969 return self._file_pattern or '*'
3936 3970
3937 3971 @file_pattern.setter
3938 3972 def file_pattern(self, value):
3939 3973 self._validate_glob(value)
3940 3974 self._file_pattern = value or '*'
3941 3975
3942 3976 def matches(self, branch, files_changed):
3943 3977 """
3944 3978 Check if this review rule matches a branch/files in a pull request
3945 3979
3946 3980 :param branch: branch name for the commit
3947 3981 :param files_changed: list of file paths changed in the pull request
3948 3982 """
3949 3983
3950 3984 branch = branch or ''
3951 3985 files_changed = files_changed or []
3952 3986
3953 3987 branch_matches = True
3954 3988 if branch:
3955 3989 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
3956 3990 branch_matches = bool(branch_regex.search(branch))
3957 3991
3958 3992 files_matches = True
3959 3993 if self.file_pattern != '*':
3960 3994 files_matches = False
3961 3995 file_regex = re.compile(glob2re(self.file_pattern))
3962 3996 for filename in files_changed:
3963 3997 if file_regex.search(filename):
3964 3998 files_matches = True
3965 3999 break
3966 4000
3967 4001 return branch_matches and files_matches
3968 4002
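    # A minimal usage sketch of matches() (pattern and branch values are
    # illustrative assumptions): both globs are compiled via glob2re and the
    # branch AND at least one changed file must match.
    #
    #     rule = RepoReviewRule()
    #     rule.branch_pattern = 'release/*'       # validated by _validate_glob
    #     rule.file_pattern = 'docs/*'
    #     rule.matches('release/4.11', ['docs/index.rst'])   # -> True
    #     rule.matches('default', ['docs/index.rst'])        # -> False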
3969 4003 @property
3970 4004 def review_users(self):
3971 4005 """ Returns the users which this rule applies to """
3972 4006
3973 4007 users = collections.OrderedDict()
3974 4008
3975 4009 for rule_user in self.rule_users:
3976 4010 if rule_user.user.active:
3977 4011 if rule_user.user not in users:
3978 4012 users[rule_user.user.username] = {
3979 4013 'user': rule_user.user,
3980 4014 'source': 'user',
3981 4015 'source_data': {},
3982 4016 'data': rule_user.rule_data()
3983 4017 }
3984 4018
3985 4019 for rule_user_group in self.rule_user_groups:
3986 4020 source_data = {
3987 4021 'name': rule_user_group.users_group.users_group_name,
3988 4022 'members': len(rule_user_group.users_group.members)
3989 4023 }
3990 4024 for member in rule_user_group.users_group.members:
3991 4025 if member.user.active:
3992 4026 users[member.user.username] = {
3993 4027 'user': member.user,
3994 4028 'source': 'user_group',
3995 4029 'source_data': source_data,
3996 4030 'data': rule_user_group.rule_data()
3997 4031 }
3998 4032
3999 4033 return users
4000 4034
4001 4035 def __repr__(self):
4002 4036 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4003 4037 self.repo_review_rule_id, self.repo)
4004 4038
4005 4039
4006 4040 class DbMigrateVersion(Base, BaseModel):
4007 4041 __tablename__ = 'db_migrate_version'
4008 4042 __table_args__ = (
4009 4043 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4010 4044 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4011 4045 )
4012 4046 repository_id = Column('repository_id', String(250), primary_key=True)
4013 4047 repository_path = Column('repository_path', Text)
4014 4048 version = Column('version', Integer)
4015 4049
4016 4050
4017 4051 class DbSession(Base, BaseModel):
4018 4052 __tablename__ = 'db_session'
4019 4053 __table_args__ = (
4020 4054 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4021 4055 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4022 4056 )
4023 4057
4024 4058 def __repr__(self):
4025 4059 return '<DB:DbSession({})>'.format(self.id)
4026 4060
4027 4061 id = Column('id', Integer())
4028 4062 namespace = Column('namespace', String(255), primary_key=True)
4029 4063 accessed = Column('accessed', DateTime, nullable=False)
4030 4064 created = Column('created', DateTime, nullable=False)
4031 4065 data = Column('data', PickleType, nullable=False)
@@ -1,1528 +1,1554 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31
32 32 from pylons.i18n.translation import _
33 33 from pylons.i18n.translation import lazy_ugettext
34 34 from pyramid.threadlocal import get_current_request
35 35 from sqlalchemy import or_
36 36
37 37 from rhodecode import events
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 from rhodecode.lib import audit_logger
39 40 from rhodecode.lib.compat import OrderedDict
40 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 42 from rhodecode.lib.markup_renderer import (
42 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 from rhodecode.lib.utils import action_logger
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 # Data structure to hold the response data when updating commits during a pull
67 67 # request update.
68 68 UpdateResponse = namedtuple('UpdateResponse', [
69 69 'executed', 'reason', 'new', 'old', 'changes',
70 70 'source_changed', 'target_changed'])
71 71
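# A minimal sketch of how callers can inspect this tuple (variable names are
# illustrative assumptions):
#
#     response = PullRequestModel().update_commits(pull_request)
#     if response.executed:
#         new_version = response.new    # the stored PullRequestVersion
#     else:
#         msg = PullRequestModel.UPDATE_STATUS_MESSAGES[response.reason]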
72 72
73 73 class PullRequestModel(BaseModel):
74 74
75 75 cls = PullRequest
76 76
77 77 DIFF_CONTEXT = 3
78 78
79 79 MERGE_STATUS_MESSAGES = {
80 80 MergeFailureReason.NONE: lazy_ugettext(
81 81 'This pull request can be automatically merged.'),
82 82 MergeFailureReason.UNKNOWN: lazy_ugettext(
83 83 'This pull request cannot be merged because of an unhandled'
84 84 ' exception.'),
85 85 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
86 86 'This pull request cannot be merged because of merge conflicts.'),
87 87 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
88 88 'This pull request could not be merged because push to target'
89 89 ' failed.'),
90 90 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
91 91 'This pull request cannot be merged because the target is not a'
92 92 ' head.'),
93 93 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
94 94 'This pull request cannot be merged because the source contains'
95 95 ' more branches than the target.'),
96 96 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
97 97 'This pull request cannot be merged because the target has'
98 98 ' multiple heads.'),
99 99 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
100 100 'This pull request cannot be merged because the target repository'
101 101 ' is locked.'),
102 102 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
103 103 'This pull request cannot be merged because the target or the '
104 104 'source reference is missing.'),
105 105 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
106 106 'This pull request cannot be merged because the target '
107 107 'reference is missing.'),
108 108 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
109 109 'This pull request cannot be merged because the source '
110 110 'reference is missing.'),
111 111 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
112 112 'This pull request cannot be merged because of conflicts related '
113 113 'to sub repositories.'),
114 114 }
115 115
116 116 UPDATE_STATUS_MESSAGES = {
117 117 UpdateFailureReason.NONE: lazy_ugettext(
118 118 'Pull request update successful.'),
119 119 UpdateFailureReason.UNKNOWN: lazy_ugettext(
120 120 'Pull request update failed because of an unknown error.'),
121 121 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
122 122 'No update needed because the source and target have not changed.'),
123 123 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
124 124 'Pull request cannot be updated because the reference type is '
125 125 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
126 126 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
127 127 'This pull request cannot be updated because the target '
128 128 'reference is missing.'),
129 129 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
130 130 'This pull request cannot be updated because the source '
131 131 'reference is missing.'),
132 132 }
133 133
134 134 def __get_pull_request(self, pull_request):
135 135 return self._get_instance((
136 136 PullRequest, PullRequestVersion), pull_request)
137 137
138 138 def _check_perms(self, perms, pull_request, user, api=False):
139 139 if not api:
140 140 return h.HasRepoPermissionAny(*perms)(
141 141 user=user, repo_name=pull_request.target_repo.repo_name)
142 142 else:
143 143 return h.HasRepoPermissionAnyApi(*perms)(
144 144 user=user, repo_name=pull_request.target_repo.repo_name)
145 145
146 146 def check_user_read(self, pull_request, user, api=False):
147 147 _perms = ('repository.admin', 'repository.write', 'repository.read',)
148 148 return self._check_perms(_perms, pull_request, user, api)
149 149
150 150 def check_user_merge(self, pull_request, user, api=False):
151 151 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
152 152 return self._check_perms(_perms, pull_request, user, api)
153 153
154 154 def check_user_update(self, pull_request, user, api=False):
155 155 owner = user.user_id == pull_request.user_id
156 156 return self.check_user_merge(pull_request, user, api) or owner
157 157
158 158 def check_user_delete(self, pull_request, user):
159 159 owner = user.user_id == pull_request.user_id
160 160 _perms = ('repository.admin',)
161 161 return self._check_perms(_perms, pull_request, user) or owner
162 162
163 163 def check_user_change_status(self, pull_request, user, api=False):
164 164 reviewer = user.user_id in [x.user_id for x in
165 165 pull_request.reviewers]
166 166 return self.check_user_update(pull_request, user, api) or reviewer
167 167
168 168 def get(self, pull_request):
169 169 return self.__get_pull_request(pull_request)
170 170
171 171 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
172 172 opened_by=None, order_by=None,
173 173 order_dir='desc'):
174 174 repo = None
175 175 if repo_name:
176 176 repo = self._get_repo(repo_name)
177 177
178 178 q = PullRequest.query()
179 179
180 180 # source or target
181 181 if repo and source:
182 182 q = q.filter(PullRequest.source_repo == repo)
183 183 elif repo:
184 184 q = q.filter(PullRequest.target_repo == repo)
185 185
186 186 # closed,opened
187 187 if statuses:
188 188 q = q.filter(PullRequest.status.in_(statuses))
189 189
190 190 # opened by filter
191 191 if opened_by:
192 192 q = q.filter(PullRequest.user_id.in_(opened_by))
193 193
194 194 if order_by:
195 195 order_map = {
196 196 'name_raw': PullRequest.pull_request_id,
197 197 'title': PullRequest.title,
198 198 'updated_on_raw': PullRequest.updated_on,
199 199 'target_repo': PullRequest.target_repo_id
200 200 }
201 201 if order_dir == 'asc':
202 202 q = q.order_by(order_map[order_by].asc())
203 203 else:
204 204 q = q.order_by(order_map[order_by].desc())
205 205
206 206 return q
207 207
208 208 def count_all(self, repo_name, source=False, statuses=None,
209 209 opened_by=None):
210 210 """
211 211 Count the number of pull requests for a specific repository.
212 212
213 213 :param repo_name: target or source repo
214 214 :param source: boolean flag to specify if repo_name refers to source
215 215 :param statuses: list of pull request statuses
216 216 :param opened_by: author user of the pull request
217 217 :returns: int number of pull requests
218 218 """
219 219 q = self._prepare_get_all_query(
220 220 repo_name, source=source, statuses=statuses, opened_by=opened_by)
221 221
222 222 return q.count()
223 223
224 224 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
225 225 offset=0, length=None, order_by=None, order_dir='desc'):
226 226 """
227 227 Get all pull requests for a specific repository.
228 228
229 229 :param repo_name: target or source repo
230 230 :param source: boolean flag to specify if repo_name refers to source
231 231 :param statuses: list of pull request statuses
232 232 :param opened_by: author user of the pull request
233 233 :param offset: pagination offset
234 234 :param length: length of returned list
235 235 :param order_by: order of the returned list
236 236 :param order_dir: 'asc' or 'desc' ordering direction
237 237 :returns: list of pull requests
238 238 """
239 239 q = self._prepare_get_all_query(
240 240 repo_name, source=source, statuses=statuses, opened_by=opened_by,
241 241 order_by=order_by, order_dir=order_dir)
242 242
243 243 if length:
244 244 pull_requests = q.limit(length).offset(offset).all()
245 245 else:
246 246 pull_requests = q.all()
247 247
248 248 return pull_requests
249 249
250 250 def count_awaiting_review(self, repo_name, source=False, statuses=None,
251 251 opened_by=None):
252 252 """
253 253 Count the number of pull requests for a specific repository that are
254 254 awaiting review.
255 255
256 256 :param repo_name: target or source repo
257 257 :param source: boolean flag to specify if repo_name refers to source
258 258 :param statuses: list of pull request statuses
259 259 :param opened_by: author user of the pull request
260 260 :returns: int number of pull requests
261 261 """
262 262 pull_requests = self.get_awaiting_review(
263 263 repo_name, source=source, statuses=statuses, opened_by=opened_by)
264 264
265 265 return len(pull_requests)
266 266
267 267 def get_awaiting_review(self, repo_name, source=False, statuses=None,
268 268 opened_by=None, offset=0, length=None,
269 269 order_by=None, order_dir='desc'):
270 270 """
271 271 Get all pull requests for a specific repository that are awaiting
272 272 review.
273 273
274 274 :param repo_name: target or source repo
275 275 :param source: boolean flag to specify if repo_name refers to source
276 276 :param statuses: list of pull request statuses
277 277 :param opened_by: author user of the pull request
278 278 :param offset: pagination offset
279 279 :param length: length of returned list
280 280 :param order_by: order of the returned list
281 281 :param order_dir: 'asc' or 'desc' ordering direction
282 282 :returns: list of pull requests
283 283 """
284 284 pull_requests = self.get_all(
285 285 repo_name, source=source, statuses=statuses, opened_by=opened_by,
286 286 order_by=order_by, order_dir=order_dir)
287 287
288 288 _filtered_pull_requests = []
289 289 for pr in pull_requests:
290 290 status = pr.calculated_review_status()
291 291 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
292 292 ChangesetStatus.STATUS_UNDER_REVIEW]:
293 293 _filtered_pull_requests.append(pr)
294 294 if length:
295 295 return _filtered_pull_requests[offset:offset+length]
296 296 else:
297 297 return _filtered_pull_requests
298 298
299 299 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
300 300 opened_by=None, user_id=None):
301 301 """
302 302 Count the number of pull requests for a specific repository that are
303 303 awaiting review from a specific user.
304 304
305 305 :param repo_name: target or source repo
306 306 :param source: boolean flag to specify if repo_name refers to source
307 307 :param statuses: list of pull request statuses
308 308 :param opened_by: author user of the pull request
309 309 :param user_id: reviewer user of the pull request
310 310 :returns: int number of pull requests
311 311 """
312 312 pull_requests = self.get_awaiting_my_review(
313 313 repo_name, source=source, statuses=statuses, opened_by=opened_by,
314 314 user_id=user_id)
315 315
316 316 return len(pull_requests)
317 317
318 318 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
319 319 opened_by=None, user_id=None, offset=0,
320 320 length=None, order_by=None, order_dir='desc'):
321 321 """
322 322 Get all pull requests for a specific repository that are awaiting
323 323 review from a specific user.
324 324
325 325 :param repo_name: target or source repo
326 326 :param source: boolean flag to specify if repo_name refers to source
327 327 :param statuses: list of pull request statuses
328 328 :param opened_by: author user of the pull request
329 329 :param user_id: reviewer user of the pull request
330 330 :param offset: pagination offset
331 331 :param length: length of returned list
332 332 :param order_by: order of the returned list
333 333 :param order_dir: 'asc' or 'desc' ordering direction
334 334 :returns: list of pull requests
335 335 """
336 336 pull_requests = self.get_all(
337 337 repo_name, source=source, statuses=statuses, opened_by=opened_by,
338 338 order_by=order_by, order_dir=order_dir)
339 339
340 340 _my = PullRequestModel().get_not_reviewed(user_id)
341 341 my_participation = []
342 342 for pr in pull_requests:
343 343 if pr in _my:
344 344 my_participation.append(pr)
345 345 _filtered_pull_requests = my_participation
346 346 if length:
347 347 return _filtered_pull_requests[offset:offset+length]
348 348 else:
349 349 return _filtered_pull_requests
350 350
351 351 def get_not_reviewed(self, user_id):
352 352 return [
353 353 x.pull_request for x in PullRequestReviewers.query().filter(
354 354 PullRequestReviewers.user_id == user_id).all()
355 355 ]
356 356
357 357 def _prepare_participating_query(self, user_id=None, statuses=None,
358 358 order_by=None, order_dir='desc'):
359 359 q = PullRequest.query()
360 360 if user_id:
361 361 reviewers_subquery = Session().query(
362 362 PullRequestReviewers.pull_request_id).filter(
363 363 PullRequestReviewers.user_id == user_id).subquery()
364 364             user_filter = or_(
365 365 PullRequest.user_id == user_id,
366 366 PullRequest.pull_request_id.in_(reviewers_subquery)
367 367 )
368 368 q = PullRequest.query().filter(user_filter)
369 369
370 370 # closed,opened
371 371 if statuses:
372 372 q = q.filter(PullRequest.status.in_(statuses))
373 373
374 374 if order_by:
375 375 order_map = {
376 376 'name_raw': PullRequest.pull_request_id,
377 377 'title': PullRequest.title,
378 378 'updated_on_raw': PullRequest.updated_on,
379 379 'target_repo': PullRequest.target_repo_id
380 380 }
381 381 if order_dir == 'asc':
382 382 q = q.order_by(order_map[order_by].asc())
383 383 else:
384 384 q = q.order_by(order_map[order_by].desc())
385 385
386 386 return q
387 387
388 388 def count_im_participating_in(self, user_id=None, statuses=None):
389 389 q = self._prepare_participating_query(user_id, statuses=statuses)
390 390 return q.count()
391 391
392 392 def get_im_participating_in(
393 393 self, user_id=None, statuses=None, offset=0,
394 394 length=None, order_by=None, order_dir='desc'):
395 395 """
396 396         Get all pull requests that I'm participating in, or have opened
397 397 """
398 398
399 399 q = self._prepare_participating_query(
400 400 user_id, statuses=statuses, order_by=order_by,
401 401 order_dir=order_dir)
402 402
403 403 if length:
404 404 pull_requests = q.limit(length).offset(offset).all()
405 405 else:
406 406 pull_requests = q.all()
407 407
408 408 return pull_requests
409 409
410 410 def get_versions(self, pull_request):
411 411 """
412 412         returns versions of pull request sorted by version ID ascending
413 413 """
414 414 return PullRequestVersion.query()\
415 415 .filter(PullRequestVersion.pull_request == pull_request)\
416 416 .order_by(PullRequestVersion.pull_request_version_id.asc())\
417 417 .all()
418 418
419 419 def create(self, created_by, source_repo, source_ref, target_repo,
420 420 target_ref, revisions, reviewers, title, description=None,
421 421 reviewer_data=None):
422 422
423 423 created_by_user = self._get_user(created_by)
424 424 source_repo = self._get_repo(source_repo)
425 425 target_repo = self._get_repo(target_repo)
426 426
427 427 pull_request = PullRequest()
428 428 pull_request.source_repo = source_repo
429 429 pull_request.source_ref = source_ref
430 430 pull_request.target_repo = target_repo
431 431 pull_request.target_ref = target_ref
432 432 pull_request.revisions = revisions
433 433 pull_request.title = title
434 434 pull_request.description = description
435 435 pull_request.author = created_by_user
436 436 pull_request.reviewer_data = reviewer_data
437 437
438 438 Session().add(pull_request)
439 439 Session().flush()
440 440
441 441 reviewer_ids = set()
442 442 # members / reviewers
443 443 for reviewer_object in reviewers:
444 444 user_id, reasons, mandatory = reviewer_object
445 445 user = self._get_user(user_id)
446 446
447 447 # skip duplicates
448 448 if user.user_id in reviewer_ids:
449 449 continue
450 450
451 451 reviewer_ids.add(user.user_id)
452 452
453 453 reviewer = PullRequestReviewers()
454 454 reviewer.user = user
455 455 reviewer.pull_request = pull_request
456 456 reviewer.reasons = reasons
457 457 reviewer.mandatory = mandatory
458 458 Session().add(reviewer)
459 459
460 460 # Set approval status to "Under Review" for all commits which are
461 461 # part of this pull request.
462 462 ChangesetStatusModel().set_status(
463 463 repo=target_repo,
464 464 status=ChangesetStatus.STATUS_UNDER_REVIEW,
465 465 user=created_by_user,
466 466 pull_request=pull_request
467 467 )
468 468
469 469 self.notify_reviewers(pull_request, reviewer_ids)
470 470 self._trigger_pull_request_hook(
471 471 pull_request, created_by_user, 'create')
472 472
473 creation_data = pull_request.get_api_data(with_merge_state=False)
474 self._log_audit_action(
475 'repo.pull_request.create', {'data': creation_data},
476 created_by_user, pull_request)
477
473 478 return pull_request
474 479
475 480 def _trigger_pull_request_hook(self, pull_request, user, action):
476 481 pull_request = self.__get_pull_request(pull_request)
477 482 target_scm = pull_request.target_repo.scm_instance()
478 483 if action == 'create':
479 484 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
480 485 elif action == 'merge':
481 486 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
482 487 elif action == 'close':
483 488 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
484 489 elif action == 'review_status_change':
485 490 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
486 491 elif action == 'update':
487 492 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
488 493 else:
489 494 return
490 495
491 496 trigger_hook(
492 497 username=user.username,
493 498 repo_name=pull_request.target_repo.repo_name,
494 499 repo_alias=target_scm.alias,
495 500 pull_request=pull_request)
496 501
497 502 def _get_commit_ids(self, pull_request):
498 503 """
499 504 Return the commit ids of the merged pull request.
500 505
501 506         This method does not yet deal correctly with the lack of autoupdates
502 507         or with implicit target updates.
503 508         For example: if a commit in the source repo is already in the target, it
504 509         will still be reported.
505 510 """
506 511 merge_rev = pull_request.merge_rev
507 512 if merge_rev is None:
508 513 raise ValueError('This pull request was not merged yet')
509 514
510 515 commit_ids = list(pull_request.revisions)
511 516 if merge_rev not in commit_ids:
512 517 commit_ids.append(merge_rev)
513 518
514 519 return commit_ids
515 520
516 521 def merge(self, pull_request, user, extras):
517 522 log.debug("Merging pull request %s", pull_request.pull_request_id)
518 523 merge_state = self._merge_pull_request(pull_request, user, extras)
519 524 if merge_state.executed:
520 525 log.debug(
521 526 "Merge was successful, updating the pull request comments.")
522 527 self._comment_and_close_pr(pull_request, user, merge_state)
523 self._log_action('user_merged_pull_request', user, pull_request)
528
529 self._log_audit_action(
530 'repo.pull_request.merge',
531 {'merge_state': merge_state.__dict__},
532 user, pull_request)
533
524 534 else:
525 535 log.warn("Merge failed, not updating the pull request.")
526 536 return merge_state
527 537
528 538 def _merge_pull_request(self, pull_request, user, extras):
529 539 target_vcs = pull_request.target_repo.scm_instance()
530 540 source_vcs = pull_request.source_repo.scm_instance()
531 541 target_ref = self._refresh_reference(
532 542 pull_request.target_ref_parts, target_vcs)
533 543
534 544 message = _(
535 545 'Merge pull request #%(pr_id)s from '
536 546 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
537 547 'pr_id': pull_request.pull_request_id,
538 548 'source_repo': source_vcs.name,
539 549 'source_ref_name': pull_request.source_ref_parts.name,
540 550 'pr_title': pull_request.title
541 551 }
542 552
543 553 workspace_id = self._workspace_id(pull_request)
544 554 use_rebase = self._use_rebase_for_merging(pull_request)
545 555
546 556 callback_daemon, extras = prepare_callback_daemon(
547 557 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
548 558 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
549 559
550 560 with callback_daemon:
551 561 # TODO: johbo: Implement a clean way to run a config_override
552 562 # for a single call.
553 563 target_vcs.config.set(
554 564 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
555 565 merge_state = target_vcs.merge(
556 566 target_ref, source_vcs, pull_request.source_ref_parts,
557 567 workspace_id, user_name=user.username,
558 568 user_email=user.email, message=message, use_rebase=use_rebase)
559 569 return merge_state
560 570
561 571 def _comment_and_close_pr(self, pull_request, user, merge_state):
562 572 pull_request.merge_rev = merge_state.merge_ref.commit_id
563 573 pull_request.updated_on = datetime.datetime.now()
564 574
565 575 CommentsModel().create(
566 576 text=unicode(_('Pull request merged and closed')),
567 577 repo=pull_request.target_repo.repo_id,
568 578 user=user.user_id,
569 579 pull_request=pull_request.pull_request_id,
570 580 f_path=None,
571 581 line_no=None,
572 582 closing_pr=True
573 583 )
574 584
575 585 Session().add(pull_request)
576 586 Session().flush()
577 587 # TODO: paris: replace invalidation with less radical solution
578 588 ScmModel().mark_for_invalidation(
579 589 pull_request.target_repo.repo_name)
580 590 self._trigger_pull_request_hook(pull_request, user, 'merge')
581 591
582 592 def has_valid_update_type(self, pull_request):
583 593 source_ref_type = pull_request.source_ref_parts.type
584 594 return source_ref_type in ['book', 'branch', 'tag']
585 595
586 596 def update_commits(self, pull_request):
587 597 """
588 598 Get the updated list of commits for the pull request
589 599 and return the new pull request version and the list
590 600 of commits processed by this update action
591 601 """
592 602 pull_request = self.__get_pull_request(pull_request)
593 603 source_ref_type = pull_request.source_ref_parts.type
594 604 source_ref_name = pull_request.source_ref_parts.name
595 605 source_ref_id = pull_request.source_ref_parts.commit_id
596 606
597 607 target_ref_type = pull_request.target_ref_parts.type
598 608 target_ref_name = pull_request.target_ref_parts.name
599 609 target_ref_id = pull_request.target_ref_parts.commit_id
600 610
601 611 if not self.has_valid_update_type(pull_request):
602 612 log.debug(
603 613 "Skipping update of pull request %s due to ref type: %s",
604 614 pull_request, source_ref_type)
605 615 return UpdateResponse(
606 616 executed=False,
607 617 reason=UpdateFailureReason.WRONG_REF_TYPE,
608 618 old=pull_request, new=None, changes=None,
609 619 source_changed=False, target_changed=False)
610 620
611 621 # source repo
612 622 source_repo = pull_request.source_repo.scm_instance()
613 623 try:
614 624 source_commit = source_repo.get_commit(commit_id=source_ref_name)
615 625 except CommitDoesNotExistError:
616 626 return UpdateResponse(
617 627 executed=False,
618 628 reason=UpdateFailureReason.MISSING_SOURCE_REF,
619 629 old=pull_request, new=None, changes=None,
620 630 source_changed=False, target_changed=False)
621 631
622 632 source_changed = source_ref_id != source_commit.raw_id
623 633
624 634 # target repo
625 635 target_repo = pull_request.target_repo.scm_instance()
626 636 try:
627 637 target_commit = target_repo.get_commit(commit_id=target_ref_name)
628 638 except CommitDoesNotExistError:
629 639 return UpdateResponse(
630 640 executed=False,
631 641 reason=UpdateFailureReason.MISSING_TARGET_REF,
632 642 old=pull_request, new=None, changes=None,
633 643 source_changed=False, target_changed=False)
634 644 target_changed = target_ref_id != target_commit.raw_id
635 645
636 646 if not (source_changed or target_changed):
637 647 log.debug("Nothing changed in pull request %s", pull_request)
638 648 return UpdateResponse(
639 649 executed=False,
640 650 reason=UpdateFailureReason.NO_CHANGE,
641 651 old=pull_request, new=None, changes=None,
642 652 source_changed=target_changed, target_changed=source_changed)
643 653
644 654 change_in_found = 'target repo' if target_changed else 'source repo'
645 655 log.debug('Updating pull request because of change in %s detected',
646 656 change_in_found)
647 657
648 658 # Finally there is a need for an update, in case of source change
649 659 # we create a new version, else just an update
650 660 if source_changed:
651 661 pull_request_version = self._create_version_from_snapshot(pull_request)
652 662 self._link_comments_to_version(pull_request_version)
653 663 else:
654 664 try:
655 665 ver = pull_request.versions[-1]
656 666 except IndexError:
657 667 ver = None
658 668
659 669 pull_request.pull_request_version_id = \
660 670 ver.pull_request_version_id if ver else None
661 671 pull_request_version = pull_request
662 672
663 673 try:
664 674 if target_ref_type in ('tag', 'branch', 'book'):
665 675 target_commit = target_repo.get_commit(target_ref_name)
666 676 else:
667 677 target_commit = target_repo.get_commit(target_ref_id)
668 678 except CommitDoesNotExistError:
669 679 return UpdateResponse(
670 680 executed=False,
671 681 reason=UpdateFailureReason.MISSING_TARGET_REF,
672 682 old=pull_request, new=None, changes=None,
673 683 source_changed=source_changed, target_changed=target_changed)
674 684
675 685 # re-compute commit ids
676 686 old_commit_ids = pull_request.revisions
677 687 pre_load = ["author", "branch", "date", "message"]
678 688 commit_ranges = target_repo.compare(
679 689 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
680 690 pre_load=pre_load)
681 691
682 692 ancestor = target_repo.get_common_ancestor(
683 693 target_commit.raw_id, source_commit.raw_id, source_repo)
684 694
685 695 pull_request.source_ref = '%s:%s:%s' % (
686 696 source_ref_type, source_ref_name, source_commit.raw_id)
687 697 pull_request.target_ref = '%s:%s:%s' % (
688 698 target_ref_type, target_ref_name, ancestor)
689 699
690 700 pull_request.revisions = [
691 701 commit.raw_id for commit in reversed(commit_ranges)]
692 702 pull_request.updated_on = datetime.datetime.now()
693 703 Session().add(pull_request)
694 704 new_commit_ids = pull_request.revisions
695 705
696 706 old_diff_data, new_diff_data = self._generate_update_diffs(
697 707 pull_request, pull_request_version)
698 708
699 709 # calculate commit and file changes
700 710 changes = self._calculate_commit_id_changes(
701 711 old_commit_ids, new_commit_ids)
702 712 file_changes = self._calculate_file_changes(
703 713 old_diff_data, new_diff_data)
704 714
705 715 # set comments as outdated if DIFFS changed
706 716 CommentsModel().outdate_comments(
707 717 pull_request, old_diff_data=old_diff_data,
708 718 new_diff_data=new_diff_data)
709 719
710 720 commit_changes = (changes.added or changes.removed)
711 721 file_node_changes = (
712 722 file_changes.added or file_changes.modified or file_changes.removed)
713 723 pr_has_changes = commit_changes or file_node_changes
714 724
715 725 # Add an automatic comment to the pull request, in case
716 726 # anything has changed
717 727 if pr_has_changes:
718 728 update_comment = CommentsModel().create(
719 729 text=self._render_update_message(changes, file_changes),
720 730 repo=pull_request.target_repo,
721 731 user=pull_request.author,
722 732 pull_request=pull_request,
723 733 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
724 734
725 735 # Update status to "Under Review" for added commits
726 736 for commit_id in changes.added:
727 737 ChangesetStatusModel().set_status(
728 738 repo=pull_request.source_repo,
729 739 status=ChangesetStatus.STATUS_UNDER_REVIEW,
730 740 comment=update_comment,
731 741 user=pull_request.author,
732 742 pull_request=pull_request,
733 743 revision=commit_id)
734 744
735 745 log.debug(
736 746 'Updated pull request %s, added_ids: %s, common_ids: %s, '
737 747 'removed_ids: %s', pull_request.pull_request_id,
738 748 changes.added, changes.common, changes.removed)
739 749 log.debug(
740 750 'Updated pull request with the following file changes: %s',
741 751 file_changes)
742 752
743 753 log.info(
744 754 "Updated pull request %s from commit %s to commit %s, "
745 755 "stored new version %s of this pull request.",
746 756 pull_request.pull_request_id, source_ref_id,
747 757 pull_request.source_ref_parts.commit_id,
748 758 pull_request_version.pull_request_version_id)
749 759 Session().commit()
750 760 self._trigger_pull_request_hook(
751 761 pull_request, pull_request.author, 'update')
752 762
753 763 return UpdateResponse(
754 764 executed=True, reason=UpdateFailureReason.NONE,
755 765 old=pull_request, new=pull_request_version, changes=changes,
756 766 source_changed=source_changed, target_changed=target_changed)
757 767
758 768 def _create_version_from_snapshot(self, pull_request):
759 769 version = PullRequestVersion()
760 770 version.title = pull_request.title
761 771 version.description = pull_request.description
762 772 version.status = pull_request.status
763 773 version.created_on = datetime.datetime.now()
764 774 version.updated_on = pull_request.updated_on
765 775 version.user_id = pull_request.user_id
766 776 version.source_repo = pull_request.source_repo
767 777 version.source_ref = pull_request.source_ref
768 778 version.target_repo = pull_request.target_repo
769 779 version.target_ref = pull_request.target_ref
770 780
771 781 version._last_merge_source_rev = pull_request._last_merge_source_rev
772 782 version._last_merge_target_rev = pull_request._last_merge_target_rev
773 783 version._last_merge_status = pull_request._last_merge_status
774 784 version.shadow_merge_ref = pull_request.shadow_merge_ref
775 785 version.merge_rev = pull_request.merge_rev
776 786 version.reviewer_data = pull_request.reviewer_data
777 787
778 788 version.revisions = pull_request.revisions
779 789 version.pull_request = pull_request
780 790 Session().add(version)
781 791 Session().flush()
782 792
783 793 return version
784 794
785 795 def _generate_update_diffs(self, pull_request, pull_request_version):
786 796
787 797 diff_context = (
788 798 self.DIFF_CONTEXT +
789 799 CommentsModel.needed_extra_diff_context())
790 800
791 801 source_repo = pull_request_version.source_repo
792 802 source_ref_id = pull_request_version.source_ref_parts.commit_id
793 803 target_ref_id = pull_request_version.target_ref_parts.commit_id
794 804 old_diff = self._get_diff_from_pr_or_version(
795 805 source_repo, source_ref_id, target_ref_id, context=diff_context)
796 806
797 807 source_repo = pull_request.source_repo
798 808 source_ref_id = pull_request.source_ref_parts.commit_id
799 809 target_ref_id = pull_request.target_ref_parts.commit_id
800 810
801 811 new_diff = self._get_diff_from_pr_or_version(
802 812 source_repo, source_ref_id, target_ref_id, context=diff_context)
803 813
804 814 old_diff_data = diffs.DiffProcessor(old_diff)
805 815 old_diff_data.prepare()
806 816 new_diff_data = diffs.DiffProcessor(new_diff)
807 817 new_diff_data.prepare()
808 818
809 819 return old_diff_data, new_diff_data
810 820
811 821 def _link_comments_to_version(self, pull_request_version):
812 822 """
813 823 Link all unlinked comments of this pull request to the given version.
814 824
815 825 :param pull_request_version: The `PullRequestVersion` to which
816 826 the comments shall be linked.
817 827
818 828 """
819 829 pull_request = pull_request_version.pull_request
820 830 comments = ChangesetComment.query()\
821 831 .filter(
822 832 # TODO: johbo: Should we query for the repo at all here?
823 833 # Pending decision on how comments of PRs are to be related
824 834 # to either the source repo, the target repo or no repo at all.
825 835 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
826 836 ChangesetComment.pull_request == pull_request,
827 837 ChangesetComment.pull_request_version == None)\
828 838 .order_by(ChangesetComment.comment_id.asc())
829 839
830 840 # TODO: johbo: Find out why this breaks if it is done in a bulk
831 841 # operation.
832 842 for comment in comments:
833 843 comment.pull_request_version_id = (
834 844 pull_request_version.pull_request_version_id)
835 845 Session().add(comment)
836 846
837 847 def _calculate_commit_id_changes(self, old_ids, new_ids):
838 848 added = [x for x in new_ids if x not in old_ids]
839 849 common = [x for x in new_ids if x in old_ids]
840 850 removed = [x for x in old_ids if x not in new_ids]
841 851 total = new_ids
842 852 return ChangeTuple(added, common, removed, total)
843 853
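    # A worked example of the list arithmetic above (commit ids shortened and
    # illustrative): old_ids=['a1', 'b2'], new_ids=['b2', 'c3'] gives
    # added=['c3'], common=['b2'], removed=['a1'], total=['b2', 'c3'].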
844 854 def _calculate_file_changes(self, old_diff_data, new_diff_data):
845 855
846 856 old_files = OrderedDict()
847 857 for diff_data in old_diff_data.parsed_diff:
848 858 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
849 859
850 860 added_files = []
851 861 modified_files = []
852 862 removed_files = []
853 863 for diff_data in new_diff_data.parsed_diff:
854 864 new_filename = diff_data['filename']
855 865 new_hash = md5_safe(diff_data['raw_diff'])
856 866
857 867 old_hash = old_files.get(new_filename)
858 868 if not old_hash:
859 869                 # file is not present in the old diff, which means it was added
860 870 added_files.append(new_filename)
861 871 else:
862 872 if new_hash != old_hash:
863 873 modified_files.append(new_filename)
864 874 # now remove a file from old, since we have seen it already
865 875 del old_files[new_filename]
866 876
867 877         # removed files are those present in the old diff but not in the new one;
868 878         # since we delete old files that also appear in the new diff, any
869 879         # left-overs are the removed files
870 880 removed_files.extend(old_files.keys())
871 881
872 882 return FileChangeTuple(added_files, modified_files, removed_files)
873 883
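A minimal standalone sketch of the added/modified/removed classification performed by _calculate_file_changes, assuming hypothetical {filename: raw_diff_bytes} mappings instead of the real DiffProcessor output:

import hashlib

def classify_file_changes(old_diffs, new_diffs):
    # old_diffs / new_diffs: hypothetical {filename: raw_diff_bytes} dicts
    old_hashes = dict(
        (name, hashlib.md5(raw).hexdigest()) for name, raw in old_diffs.items())
    added, modified = [], []
    for name, raw in new_diffs.items():
        new_hash = hashlib.md5(raw).hexdigest()
        old_hash = old_hashes.pop(name, None)
        if old_hash is None:
            added.append(name)        # only in the new diff -> added
        elif new_hash != old_hash:
            modified.append(name)     # per-file diff hash changed -> modified
    removed = list(old_hashes)        # left-overs were only in the old diff
    return added, modified, removed

# classify_file_changes({'a.py': b'-x\n+y\n'},
#                       {'a.py': b'-x\n+z\n', 'b.py': b'+new\n'})
# -> (['b.py'], ['a.py'], [])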
874 884 def _render_update_message(self, changes, file_changes):
875 885 """
876 886 Render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
877 887 so it always looks the same regardless of which default renderer
878 888 the system is using.
879 889
880 890 :param changes: changes named tuple
881 891 :param file_changes: file changes named tuple
882 892
883 893 """
884 894 new_status = ChangesetStatus.get_status_lbl(
885 895 ChangesetStatus.STATUS_UNDER_REVIEW)
886 896
887 897 changed_files = (
888 898 file_changes.added + file_changes.modified + file_changes.removed)
889 899
890 900 params = {
891 901 'under_review_label': new_status,
892 902 'added_commits': changes.added,
893 903 'removed_commits': changes.removed,
894 904 'changed_files': changed_files,
895 905 'added_files': file_changes.added,
896 906 'modified_files': file_changes.modified,
897 907 'removed_files': file_changes.removed,
898 908 }
899 909 renderer = RstTemplateRenderer()
900 910 return renderer.render('pull_request_update.mako', **params)
901 911
902 def edit(self, pull_request, title, description):
912 def edit(self, pull_request, title, description, user):
903 913 pull_request = self.__get_pull_request(pull_request)
914 old_data = pull_request.get_api_data(with_merge_state=False)
904 915 if pull_request.is_closed():
905 916 raise ValueError('This pull request is closed')
906 917 if title:
907 918 pull_request.title = title
908 919 pull_request.description = description
909 920 pull_request.updated_on = datetime.datetime.now()
910 921 Session().add(pull_request)
922 self._log_audit_action(
923 'repo.pull_request.edit', {'old_data': old_data},
924 user, pull_request)
911 925
912 def update_reviewers(self, pull_request, reviewer_data):
926 def update_reviewers(self, pull_request, reviewer_data, user):
913 927 """
914 928 Update the reviewers in the pull request
915 929
916 930 :param pull_request: the pr to update
917 931 :param reviewer_data: list of tuples
918 932 [(user, ['reason1', 'reason2'], mandatory_flag)]
919 933 """
920 934
921 935 reviewers = {}
922 936 for user_id, reasons, mandatory in reviewer_data:
923 937 if isinstance(user_id, (int, basestring)):
924 938 user_id = self._get_user(user_id).user_id
925 939 reviewers[user_id] = {
926 940 'reasons': reasons, 'mandatory': mandatory}
927 941
928 942 reviewers_ids = set(reviewers.keys())
929 943 pull_request = self.__get_pull_request(pull_request)
930 944 current_reviewers = PullRequestReviewers.query()\
931 945 .filter(PullRequestReviewers.pull_request ==
932 946 pull_request).all()
933 947 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
934 948
935 949 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
936 950 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
937 951
938 952 log.debug("Adding %s reviewers", ids_to_add)
939 953 log.debug("Removing %s reviewers", ids_to_remove)
940 954 changed = False
941 955 for uid in ids_to_add:
942 956 changed = True
943 957 _usr = self._get_user(uid)
944 958 reviewer = PullRequestReviewers()
945 959 reviewer.user = _usr
946 960 reviewer.pull_request = pull_request
947 961 reviewer.reasons = reviewers[uid]['reasons']
948 962 # NOTE(marcink): mandatory shouldn't be changed now
949 #reviewer.mandatory = reviewers[uid]['reasons']
963 # reviewer.mandatory = reviewers[uid]['reasons']
950 964 Session().add(reviewer)
965 self._log_audit_action(
966 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
967 user, pull_request)
951 968
952 969 for uid in ids_to_remove:
953 970 changed = True
954 971 reviewers = PullRequestReviewers.query()\
955 972 .filter(PullRequestReviewers.user_id == uid,
956 973 PullRequestReviewers.pull_request == pull_request)\
957 974 .all()
958 975 # use .all() in case we accidentally added the same person twice
959 976 # this CAN happen due to the lack of DB checks
960 977 for obj in reviewers:
978 old_data = obj.get_dict()
961 979 Session().delete(obj)
980 self._log_audit_action(
981 'repo.pull_request.reviewer.delete',
982 {'old_data': old_data}, user, pull_request)
962 983
963 984 if changed:
964 985 pull_request.updated_on = datetime.datetime.now()
965 986 Session().add(pull_request)
966 987
967 988 self.notify_reviewers(pull_request, ids_to_add)
968 989 return ids_to_add, ids_to_remove
969 990
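For reference, a hypothetical call matching the reviewer_data shape documented above (user id or username, list of reasons, mandatory flag); the pull request and acting user objects are assumed to be in scope:

reviewer_data = [
    (4, ['source code author'], False),   # ids, reasons and flags are made up
    ('review-bot', [], True),
]
ids_added, ids_removed = PullRequestModel().update_reviewers(
    pull_request, reviewer_data, user)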
970 991 def get_url(self, pull_request, request=None, permalink=False):
971 992 if not request:
972 993 request = get_current_request()
973 994
974 995 if permalink:
975 996 return request.route_url(
976 997 'pull_requests_global',
977 998 pull_request_id=pull_request.pull_request_id,)
978 999 else:
979 1000 return request.route_url(
980 1001 'pullrequest_show',
981 1002 repo_name=safe_str(pull_request.target_repo.repo_name),
982 1003 pull_request_id=pull_request.pull_request_id,)
983 1004
984 1005 def get_shadow_clone_url(self, pull_request):
985 1006 """
986 1007 Returns qualified url pointing to the shadow repository. If this pull
987 1008 request is closed there is no shadow repository and ``None`` will be
988 1009 returned.
989 1010 """
990 1011 if pull_request.is_closed():
991 1012 return None
992 1013 else:
993 1014 pr_url = urllib.unquote(self.get_url(pull_request))
994 1015 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
995 1016
996 1017 def notify_reviewers(self, pull_request, reviewers_ids):
997 1018 # notification to reviewers
998 1019 if not reviewers_ids:
999 1020 return
1000 1021
1001 1022 pull_request_obj = pull_request
1002 1023 # get the current participants of this pull request
1003 1024 recipients = reviewers_ids
1004 1025 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1005 1026
1006 1027 pr_source_repo = pull_request_obj.source_repo
1007 1028 pr_target_repo = pull_request_obj.target_repo
1008 1029
1009 1030 pr_url = h.url(
1010 1031 'pullrequest_show',
1011 1032 repo_name=pr_target_repo.repo_name,
1012 1033 pull_request_id=pull_request_obj.pull_request_id,
1013 1034 qualified=True,)
1014 1035
1015 1036 # set some variables for email notification
1016 1037 pr_target_repo_url = h.route_url(
1017 1038 'repo_summary', repo_name=pr_target_repo.repo_name)
1018 1039
1019 1040 pr_source_repo_url = h.route_url(
1020 1041 'repo_summary', repo_name=pr_source_repo.repo_name)
1021 1042
1022 1043 # pull request specifics
1023 1044 pull_request_commits = [
1024 1045 (x.raw_id, x.message)
1025 1046 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1026 1047
1027 1048 kwargs = {
1028 1049 'user': pull_request.author,
1029 1050 'pull_request': pull_request_obj,
1030 1051 'pull_request_commits': pull_request_commits,
1031 1052
1032 1053 'pull_request_target_repo': pr_target_repo,
1033 1054 'pull_request_target_repo_url': pr_target_repo_url,
1034 1055
1035 1056 'pull_request_source_repo': pr_source_repo,
1036 1057 'pull_request_source_repo_url': pr_source_repo_url,
1037 1058
1038 1059 'pull_request_url': pr_url,
1039 1060 }
1040 1061
1041 1062 # pre-generate the subject for notification itself
1042 1063 (subject,
1043 1064 _h, _e, # we don't care about those
1044 1065 body_plaintext) = EmailNotificationModel().render_email(
1045 1066 notification_type, **kwargs)
1046 1067
1047 1068 # create notification objects, and emails
1048 1069 NotificationModel().create(
1049 1070 created_by=pull_request.author,
1050 1071 notification_subject=subject,
1051 1072 notification_body=body_plaintext,
1052 1073 notification_type=notification_type,
1053 1074 recipients=recipients,
1054 1075 email_kwargs=kwargs,
1055 1076 )
1056 1077
1057 def delete(self, pull_request):
1078 def delete(self, pull_request, user):
1058 1079 pull_request = self.__get_pull_request(pull_request)
1080 old_data = pull_request.get_api_data(with_merge_state=False)
1059 1081 self._cleanup_merge_workspace(pull_request)
1082 self._log_audit_action(
1083 'repo.pull_request.delete', {'old_data': old_data},
1084 user, pull_request)
1060 1085 Session().delete(pull_request)
1061 1086
1062 1087 def close_pull_request(self, pull_request, user):
1063 1088 pull_request = self.__get_pull_request(pull_request)
1064 1089 self._cleanup_merge_workspace(pull_request)
1065 1090 pull_request.status = PullRequest.STATUS_CLOSED
1066 1091 pull_request.updated_on = datetime.datetime.now()
1067 1092 Session().add(pull_request)
1068 1093 self._trigger_pull_request_hook(
1069 1094 pull_request, pull_request.author, 'close')
1070 self._log_action('user_closed_pull_request', user, pull_request)
1095 self._log_audit_action(
1096 'repo.pull_request.close', {}, user, pull_request)
1071 1097
1072 1098 def close_pull_request_with_comment(
1073 1099 self, pull_request, user, repo, message=None):
1074 1100
1075 1101 pull_request_review_status = pull_request.calculated_review_status()
1076 1102
1077 1103 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1078 1104 # approved only if we have voting consent
1079 1105 status = ChangesetStatus.STATUS_APPROVED
1080 1106 else:
1081 1107 status = ChangesetStatus.STATUS_REJECTED
1082 1108 status_lbl = ChangesetStatus.get_status_lbl(status)
1083 1109
1084 1110 default_message = (
1085 1111 _('Closing with status change {transition_icon} {status}.')
1086 1112 ).format(transition_icon='>', status=status_lbl)
1087 1113 text = message or default_message
1088 1114
1089 1115 # create a comment, and link it to new status
1090 1116 comment = CommentsModel().create(
1091 1117 text=text,
1092 1118 repo=repo.repo_id,
1093 1119 user=user.user_id,
1094 1120 pull_request=pull_request.pull_request_id,
1095 1121 status_change=status_lbl,
1096 1122 status_change_type=status,
1097 1123 closing_pr=True
1098 1124 )
1099 1125
1100 1126 # calculate old status before we change it
1101 1127 old_calculated_status = pull_request.calculated_review_status()
1102 1128 ChangesetStatusModel().set_status(
1103 1129 repo.repo_id,
1104 1130 status,
1105 1131 user.user_id,
1106 1132 comment=comment,
1107 1133 pull_request=pull_request.pull_request_id
1108 1134 )
1109 1135
1110 1136 Session().flush()
1111 1137 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1112 1138 # we now calculate the status of the pull request again, and based on
1113 1139 # that calculation trigger a status change. This might happen in cases
1114 1140 # where a non-reviewer admin closes a pr, so his vote doesn't change
1115 1141 # the status, while if he's a reviewer it might.
1116 1142 calculated_status = pull_request.calculated_review_status()
1117 1143 if old_calculated_status != calculated_status:
1118 1144 self._trigger_pull_request_hook(
1119 1145 pull_request, user, 'review_status_change')
1120 1146
1121 1147 # finally close the PR
1122 1148 PullRequestModel().close_pull_request(
1123 1149 pull_request.pull_request_id, user)
1124 1150
1125 1151 return comment, status
1126 1152
1127 1153 def merge_status(self, pull_request):
1128 1154 if not self._is_merge_enabled(pull_request):
1129 1155 return False, _('Server-side pull request merging is disabled.')
1130 1156 if pull_request.is_closed():
1131 1157 return False, _('This pull request is closed.')
1132 1158 merge_possible, msg = self._check_repo_requirements(
1133 1159 target=pull_request.target_repo, source=pull_request.source_repo)
1134 1160 if not merge_possible:
1135 1161 return merge_possible, msg
1136 1162
1137 1163 try:
1138 1164 resp = self._try_merge(pull_request)
1139 1165 log.debug("Merge response: %s", resp)
1140 1166 status = resp.possible, self.merge_status_message(
1141 1167 resp.failure_reason)
1142 1168 except NotImplementedError:
1143 1169 status = False, _('Pull request merging is not supported.')
1144 1170
1145 1171 return status
1146 1172
1147 1173 def _check_repo_requirements(self, target, source):
1148 1174 """
1149 1175 Check if `target` and `source` have compatible requirements.
1150 1176
1151 1177 Currently this is just checking for largefiles.
1152 1178 """
1153 1179 target_has_largefiles = self._has_largefiles(target)
1154 1180 source_has_largefiles = self._has_largefiles(source)
1155 1181 merge_possible = True
1156 1182 message = u''
1157 1183
1158 1184 if target_has_largefiles != source_has_largefiles:
1159 1185 merge_possible = False
1160 1186 if source_has_largefiles:
1161 1187 message = _(
1162 1188 'Target repository large files support is disabled.')
1163 1189 else:
1164 1190 message = _(
1165 1191 'Source repository large files support is disabled.')
1166 1192
1167 1193 return merge_possible, message
1168 1194
1169 1195 def _has_largefiles(self, repo):
1170 1196 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1171 1197 'extensions', 'largefiles')
1172 1198 return largefiles_ui and largefiles_ui[0].active
1173 1199
1174 1200 def _try_merge(self, pull_request):
1175 1201 """
1176 1202 Try to merge the pull request and return the merge status.
1177 1203 """
1178 1204 log.debug(
1179 1205 "Trying out if the pull request %s can be merged.",
1180 1206 pull_request.pull_request_id)
1181 1207 target_vcs = pull_request.target_repo.scm_instance()
1182 1208
1183 1209 # Refresh the target reference.
1184 1210 try:
1185 1211 target_ref = self._refresh_reference(
1186 1212 pull_request.target_ref_parts, target_vcs)
1187 1213 except CommitDoesNotExistError:
1188 1214 merge_state = MergeResponse(
1189 1215 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1190 1216 return merge_state
1191 1217
1192 1218 target_locked = pull_request.target_repo.locked
1193 1219 if target_locked and target_locked[0]:
1194 1220 log.debug("The target repository is locked.")
1195 1221 merge_state = MergeResponse(
1196 1222 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1197 1223 elif self._needs_merge_state_refresh(pull_request, target_ref):
1198 1224 log.debug("Refreshing the merge status of the repository.")
1199 1225 merge_state = self._refresh_merge_state(
1200 1226 pull_request, target_vcs, target_ref)
1201 1227 else:
1202 1228 possible = pull_request.\
1203 1229 _last_merge_status == MergeFailureReason.NONE
1204 1230 merge_state = MergeResponse(
1205 1231 possible, False, None, pull_request._last_merge_status)
1206 1232
1207 1233 return merge_state
1208 1234
1209 1235 def _refresh_reference(self, reference, vcs_repository):
1210 1236 if reference.type in ('branch', 'book'):
1211 1237 name_or_id = reference.name
1212 1238 else:
1213 1239 name_or_id = reference.commit_id
1214 1240 refreshed_commit = vcs_repository.get_commit(name_or_id)
1215 1241 refreshed_reference = Reference(
1216 1242 reference.type, reference.name, refreshed_commit.raw_id)
1217 1243 return refreshed_reference
1218 1244
1219 1245 def _needs_merge_state_refresh(self, pull_request, target_reference):
1220 1246 return not(
1221 1247 pull_request.revisions and
1222 1248 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1223 1249 target_reference.commit_id == pull_request._last_merge_target_rev)
1224 1250
1225 1251 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1226 1252 workspace_id = self._workspace_id(pull_request)
1227 1253 source_vcs = pull_request.source_repo.scm_instance()
1228 1254 use_rebase = self._use_rebase_for_merging(pull_request)
1229 1255 merge_state = target_vcs.merge(
1230 1256 target_reference, source_vcs, pull_request.source_ref_parts,
1231 1257 workspace_id, dry_run=True, use_rebase=use_rebase)
1232 1258
1233 1259 # Do not store the response if there was an unknown error.
1234 1260 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1235 1261 pull_request._last_merge_source_rev = \
1236 1262 pull_request.source_ref_parts.commit_id
1237 1263 pull_request._last_merge_target_rev = target_reference.commit_id
1238 1264 pull_request._last_merge_status = merge_state.failure_reason
1239 1265 pull_request.shadow_merge_ref = merge_state.merge_ref
1240 1266 Session().add(pull_request)
1241 1267 Session().commit()
1242 1268
1243 1269 return merge_state
1244 1270
1245 1271 def _workspace_id(self, pull_request):
1246 1272 workspace_id = 'pr-%s' % pull_request.pull_request_id
1247 1273 return workspace_id
1248 1274
1249 1275 def merge_status_message(self, status_code):
1250 1276 """
1251 1277 Return a human friendly error message for the given merge status code.
1252 1278 """
1253 1279 return self.MERGE_STATUS_MESSAGES[status_code]
1254 1280
1255 1281 def generate_repo_data(self, repo, commit_id=None, branch=None,
1256 1282 bookmark=None):
1257 1283 all_refs, selected_ref = \
1258 1284 self._get_repo_pullrequest_sources(
1259 1285 repo.scm_instance(), commit_id=commit_id,
1260 1286 branch=branch, bookmark=bookmark)
1261 1287
1262 1288 refs_select2 = []
1263 1289 for element in all_refs:
1264 1290 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1265 1291 refs_select2.append({'text': element[1], 'children': children})
1266 1292
1267 1293 return {
1268 1294 'user': {
1269 1295 'user_id': repo.user.user_id,
1270 1296 'username': repo.user.username,
1271 1297 'firstname': repo.user.firstname,
1272 1298 'lastname': repo.user.lastname,
1273 1299 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1274 1300 },
1275 1301 'description': h.chop_at_smart(repo.description, '\n'),
1276 1302 'refs': {
1277 1303 'all_refs': all_refs,
1278 1304 'selected_ref': selected_ref,
1279 1305 'select2_refs': refs_select2
1280 1306 }
1281 1307 }
1282 1308
1283 1309 def generate_pullrequest_title(self, source, source_ref, target):
1284 1310 return u'{source}#{at_ref} to {target}'.format(
1285 1311 source=source,
1286 1312 at_ref=source_ref,
1287 1313 target=target,
1288 1314 )
1289 1315
1290 1316 def _cleanup_merge_workspace(self, pull_request):
1291 1317 # Merging related cleanup
1292 1318 target_scm = pull_request.target_repo.scm_instance()
1293 1319 workspace_id = 'pr-%s' % pull_request.pull_request_id
1294 1320
1295 1321 try:
1296 1322 target_scm.cleanup_merge_workspace(workspace_id)
1297 1323 except NotImplementedError:
1298 1324 pass
1299 1325
1300 1326 def _get_repo_pullrequest_sources(
1301 1327 self, repo, commit_id=None, branch=None, bookmark=None):
1302 1328 """
1303 1329 Return a structure with the repo's interesting commits, suitable
1304 1330 for the selectors in the pullrequest controller
1305 1331
1306 1332 :param commit_id: a commit that must be in the list somehow
1307 1333 and selected by default
1308 1334 :param branch: a branch that must be in the list and selected
1309 1335 by default - even if closed
1310 1336 :param bookmark: a bookmark that must be in the list and selected
1311 1337 """
1312 1338
1313 1339 commit_id = safe_str(commit_id) if commit_id else None
1314 1340 branch = safe_str(branch) if branch else None
1315 1341 bookmark = safe_str(bookmark) if bookmark else None
1316 1342
1317 1343 selected = None
1318 1344
1319 1345 # order matters: first source that has commit_id in it will be selected
1320 1346 sources = []
1321 1347 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1322 1348 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1323 1349
1324 1350 if commit_id:
1325 1351 ref_commit = (h.short_id(commit_id), commit_id)
1326 1352 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1327 1353
1328 1354 sources.append(
1329 1355 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1330 1356 )
1331 1357
1332 1358 groups = []
1333 1359 for group_key, ref_list, group_name, match in sources:
1334 1360 group_refs = []
1335 1361 for ref_name, ref_id in ref_list:
1336 1362 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1337 1363 group_refs.append((ref_key, ref_name))
1338 1364
1339 1365 if not selected:
1340 1366 if set([commit_id, match]) & set([ref_id, ref_name]):
1341 1367 selected = ref_key
1342 1368
1343 1369 if group_refs:
1344 1370 groups.append((group_refs, group_name))
1345 1371
1346 1372 if not selected:
1347 1373 ref = commit_id or branch or bookmark
1348 1374 if ref:
1349 1375 raise CommitDoesNotExistError(
1350 1376 'No commit refs could be found matching: %s' % ref)
1351 1377 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1352 1378 selected = 'branch:%s:%s' % (
1353 1379 repo.DEFAULT_BRANCH_NAME,
1354 1380 repo.branches[repo.DEFAULT_BRANCH_NAME]
1355 1381 )
1356 1382 elif repo.commit_ids:
1357 1383 rev = repo.commit_ids[0]
1358 1384 selected = 'rev:%s:%s' % (rev, rev)
1359 1385 else:
1360 1386 raise EmptyRepositoryError()
1361 1387 return groups, selected
1362 1388
1363 1389 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1364 1390 return self._get_diff_from_pr_or_version(
1365 1391 source_repo, source_ref_id, target_ref_id, context=context)
1366 1392
1367 1393 def _get_diff_from_pr_or_version(
1368 1394 self, source_repo, source_ref_id, target_ref_id, context):
1369 1395 target_commit = source_repo.get_commit(
1370 1396 commit_id=safe_str(target_ref_id))
1371 1397 source_commit = source_repo.get_commit(
1372 1398 commit_id=safe_str(source_ref_id))
1373 1399 if isinstance(source_repo, Repository):
1374 1400 vcs_repo = source_repo.scm_instance()
1375 1401 else:
1376 1402 vcs_repo = source_repo
1377 1403
1378 1404 # TODO: johbo: In the context of an update, we cannot reach
1379 1405 # the old commit anymore with our normal mechanisms. It needs
1380 1406 # some sort of special support in the vcs layer to avoid this
1381 1407 # workaround.
1382 1408 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1383 1409 vcs_repo.alias == 'git'):
1384 1410 source_commit.raw_id = safe_str(source_ref_id)
1385 1411
1386 1412 log.debug('calculating diff between '
1387 1413 'source_ref:%s and target_ref:%s for repo `%s`',
1388 1414 target_ref_id, source_ref_id,
1389 1415 safe_unicode(vcs_repo.path))
1390 1416
1391 1417 vcs_diff = vcs_repo.get_diff(
1392 1418 commit1=target_commit, commit2=source_commit, context=context)
1393 1419 return vcs_diff
1394 1420
1395 1421 def _is_merge_enabled(self, pull_request):
1396 1422 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1397 1423 settings = settings_model.get_general_settings()
1398 1424 return settings.get('rhodecode_pr_merge_enabled', False)
1399 1425
1400 1426 def _use_rebase_for_merging(self, pull_request):
1401 1427 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1402 1428 settings = settings_model.get_general_settings()
1403 1429 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1404 1430
1405 def _log_action(self, action, user, pull_request):
1406 action_logger(
1407 user,
1408 '{action}:{pr_id}'.format(
1409 action=action, pr_id=pull_request.pull_request_id),
1410 pull_request.target_repo)
1431 def _log_audit_action(self, action, action_data, user, pull_request):
1432 audit_logger.store(
1433 action=action,
1434 action_data=action_data,
1435 user=user,
1436 repo=pull_request.target_repo)
1411 1437
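The recurring pattern in this changeset is to snapshot the record with get_api_data() before mutating it and pass that snapshot as old_data to the audit call; a hedged sketch of a caller inside a PullRequestModel method (names assumed to be in scope):

# sketch only: mirrors how edit() and delete() above feed the audit log
old_data = pull_request.get_api_data(with_merge_state=False)
pull_request.title = 'new title'
self._log_audit_action(
    'repo.pull_request.edit', {'old_data': old_data}, user, pull_request)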
1412 1438 def get_reviewer_functions(self):
1413 1439 """
1414 1440 Fetches functions for validation and fetching default reviewers.
1414 1440 If available we use the EE package, else we fall back to the CE
1415 1441 package functions
1417 1443 """
1418 1444 try:
1419 1445 from rc_reviewers.utils import get_default_reviewers_data
1420 1446 from rc_reviewers.utils import validate_default_reviewers
1421 1447 except ImportError:
1422 1448 from rhodecode.apps.repository.utils import \
1423 1449 get_default_reviewers_data
1424 1450 from rhodecode.apps.repository.utils import \
1425 1451 validate_default_reviewers
1426 1452
1427 1453 return get_default_reviewers_data, validate_default_reviewers
1428 1454
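Callers unpack the pair returned above; a hypothetical usage with invented variable names:

get_reviewers_data, validate_reviewers = \
    PullRequestModel().get_reviewer_functions()
# both callables now point at either the EE (rc_reviewers) implementation
# or the CE fallback in rhodecode.apps.repository.utils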
1429 1455
1430 1456 class MergeCheck(object):
1431 1457 """
1432 1458 Perform merge checks and return a check object which stores
1433 1459 information about merge errors and merge conditions
1434 1460 """
1435 1461 TODO_CHECK = 'todo'
1436 1462 PERM_CHECK = 'perm'
1437 1463 REVIEW_CHECK = 'review'
1438 1464 MERGE_CHECK = 'merge'
1439 1465
1440 1466 def __init__(self):
1441 1467 self.review_status = None
1442 1468 self.merge_possible = None
1443 1469 self.merge_msg = ''
1444 1470 self.failed = None
1445 1471 self.errors = []
1446 1472 self.error_details = OrderedDict()
1447 1473
1448 1474 def push_error(self, error_type, message, error_key, details):
1449 1475 self.failed = True
1450 1476 self.errors.append([error_type, message])
1451 1477 self.error_details[error_key] = dict(
1452 1478 details=details,
1453 1479 error_type=error_type,
1454 1480 message=message
1455 1481 )
1456 1482
1457 1483 @classmethod
1458 1484 def validate(cls, pull_request, user, fail_early=False, translator=None):
1459 1485 # if migrated to pyramid...
1460 1486 # _ = lambda: translator or _ # use passed in translator if any
1461 1487
1462 1488 merge_check = cls()
1463 1489
1464 1490 # permissions to merge
1465 1491 user_allowed_to_merge = PullRequestModel().check_user_merge(
1466 1492 pull_request, user)
1467 1493 if not user_allowed_to_merge:
1468 1494 log.debug("MergeCheck: cannot merge, approval is pending.")
1469 1495
1470 1496 msg = _('User `{}` not allowed to perform merge.').format(user.username)
1471 1497 merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
1472 1498 if fail_early:
1473 1499 return merge_check
1474 1500
1475 1501 # review status, must be always present
1476 1502 review_status = pull_request.calculated_review_status()
1477 1503 merge_check.review_status = review_status
1478 1504
1479 1505 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1480 1506 if not status_approved:
1481 1507 log.debug("MergeCheck: cannot merge, approval is pending.")
1482 1508
1483 1509 msg = _('Pull request reviewer approval is pending.')
1484 1510
1485 1511 merge_check.push_error(
1486 1512 'warning', msg, cls.REVIEW_CHECK, review_status)
1487 1513
1488 1514 if fail_early:
1489 1515 return merge_check
1490 1516
1491 1517 # left over TODOs
1492 1518 todos = CommentsModel().get_unresolved_todos(pull_request)
1493 1519 if todos:
1494 1520 log.debug("MergeCheck: cannot merge, {} "
1495 1521 "unresolved todos left.".format(len(todos)))
1496 1522
1497 1523 if len(todos) == 1:
1498 1524 msg = _('Cannot merge, {} TODO still not resolved.').format(
1499 1525 len(todos))
1500 1526 else:
1501 1527 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1502 1528 len(todos))
1503 1529
1504 1530 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1505 1531
1506 1532 if fail_early:
1507 1533 return merge_check
1508 1534
1509 1535 # merge possible
1510 1536 merge_status, msg = PullRequestModel().merge_status(pull_request)
1511 1537 merge_check.merge_possible = merge_status
1512 1538 merge_check.merge_msg = msg
1513 1539 if not merge_status:
1514 1540 log.debug(
1515 1541 "MergeCheck: cannot merge, pull request merge not possible.")
1516 1542 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1517 1543
1518 1544 if fail_early:
1519 1545 return merge_check
1520 1546
1521 1547 return merge_check
1522 1548
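A hedged usage sketch of MergeCheck, assuming pull_request, user and a module-level log are available:

merge_check = MergeCheck.validate(pull_request, user, fail_early=False)
if merge_check.failed:
    for error_type, message in merge_check.errors:
        log.warning('%s merge check: %s', error_type, message)
else:
    # merge_possible/merge_msg mirror PullRequestModel().merge_status()
    log.debug('merge possible: %s (%s)',
              merge_check.merge_possible, merge_check.merge_msg)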
1523 1549
1524 1550 ChangeTuple = namedtuple('ChangeTuple',
1525 1551 ['added', 'common', 'removed', 'total'])
1526 1552
1527 1553 FileChangeTuple = namedtuple('FileChangeTuple',
1528 1554 ['added', 'modified', 'removed'])
@@ -1,210 +1,213 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22 from rhodecode.model.db import User, UserIpMap
23 23 from rhodecode.model.permission import PermissionModel
24 24 from rhodecode.tests import (
25 25 TestController, url, clear_all_caches, assert_session_flash)
26 26
27 27
28 28 class TestAdminPermissionsController(TestController):
29 29
30 30 @pytest.fixture(scope='class', autouse=True)
31 31 def prepare(self, request):
32 32 # cleanup and reset to default permissions after
33 33 @request.addfinalizer
34 34 def cleanup():
35 35 PermissionModel().create_default_user_permissions(
36 36 User.get_default_user(), force=True)
37 37
38 38 def test_index_application(self):
39 39 self.log_user()
40 40 self.app.get(url('admin_permissions_application'))
41 41
42 42 @pytest.mark.parametrize(
43 43 'anonymous, default_register, default_register_message, default_password_reset,'
44 44 'default_extern_activate, expect_error, expect_form_error', [
45 45 (True, 'hg.register.none', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
46 46 False, False),
47 47 (True, 'hg.register.manual_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.auto',
48 48 False, False),
49 49 (True, 'hg.register.auto_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
50 50 False, False),
51 51 (True, 'hg.register.auto_activate', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
52 52 False, False),
53 53 (True, 'hg.register.XXX', '', 'hg.password_reset.enabled', 'hg.extern_activate.manual',
54 54 False, True),
55 55 (True, '', '', 'hg.password_reset.enabled', '', True, False),
56 56 ])
57 57 def test_update_application_permissions(
58 58 self, anonymous, default_register, default_register_message, default_password_reset,
59 59 default_extern_activate, expect_error, expect_form_error):
60 60
61 61 self.log_user()
62 62
63 63 # TODO: anonymous access set here to False, breaks some other tests
64 64 params = {
65 65 'csrf_token': self.csrf_token,
66 66 'anonymous': anonymous,
67 67 'default_register': default_register,
68 68 'default_register_message': default_register_message,
69 69 'default_password_reset': default_password_reset,
70 70 'default_extern_activate': default_extern_activate,
71 71 }
72 72 response = self.app.post(url('admin_permissions_application'),
73 73 params=params)
74 74 if expect_form_error:
75 75 assert response.status_int == 200
76 76 response.mustcontain('Value must be one of')
77 77 else:
78 78 if expect_error:
79 79 msg = 'Error occurred during update of permissions'
80 80 else:
81 81 msg = 'Application permissions updated successfully'
82 82 assert_session_flash(response, msg)
83 83
84 84 def test_index_object(self):
85 85 self.log_user()
86 86 self.app.get(url('admin_permissions_object'))
87 87
88 88 @pytest.mark.parametrize(
89 89 'repo, repo_group, user_group, expect_error, expect_form_error', [
90 90 ('repository.none', 'group.none', 'usergroup.none', False, False),
91 91 ('repository.read', 'group.read', 'usergroup.read', False, False),
92 92 ('repository.write', 'group.write', 'usergroup.write',
93 93 False, False),
94 94 ('repository.admin', 'group.admin', 'usergroup.admin',
95 95 False, False),
96 96 ('repository.XXX', 'group.admin', 'usergroup.admin', False, True),
97 97 ('', '', '', True, False),
98 98 ])
99 99 def test_update_object_permissions(self, repo, repo_group, user_group,
100 100 expect_error, expect_form_error):
101 101 self.log_user()
102 102
103 103 params = {
104 104 'csrf_token': self.csrf_token,
105 105 'default_repo_perm': repo,
106 106 'overwrite_default_repo': False,
107 107 'default_group_perm': repo_group,
108 108 'overwrite_default_group': False,
109 109 'default_user_group_perm': user_group,
110 110 'overwrite_default_user_group': False,
111 111 }
112 112 response = self.app.post(url('admin_permissions_object'),
113 113 params=params)
114 114 if expect_form_error:
115 115 assert response.status_int == 200
116 116 response.mustcontain('Value must be one of')
117 117 else:
118 118 if expect_error:
119 119 msg = 'Error occurred during update of permissions'
120 120 else:
121 121 msg = 'Object permissions updated successfully'
122 122 assert_session_flash(response, msg)
123 123
124 124 def test_index_global(self):
125 125 self.log_user()
126 126 self.app.get(url('admin_permissions_global'))
127 127
128 128 @pytest.mark.parametrize(
129 129 'repo_create, repo_create_write, user_group_create, repo_group_create,'
130 130 'fork_create, inherit_default_permissions, expect_error,'
131 131 'expect_form_error', [
132 132 ('hg.create.none', 'hg.create.write_on_repogroup.false',
133 133 'hg.usergroup.create.false', 'hg.repogroup.create.false',
134 134 'hg.fork.none', 'hg.inherit_default_perms.false', False, False),
135 135 ('hg.create.repository', 'hg.create.write_on_repogroup.true',
136 136 'hg.usergroup.create.true', 'hg.repogroup.create.true',
137 137 'hg.fork.repository', 'hg.inherit_default_perms.false',
138 138 False, False),
139 139 ('hg.create.XXX', 'hg.create.write_on_repogroup.true',
140 140 'hg.usergroup.create.true', 'hg.repogroup.create.true',
141 141 'hg.fork.repository', 'hg.inherit_default_perms.false',
142 142 False, True),
143 143 ('', '', '', '', '', '', True, False),
144 144 ])
145 145 def test_update_global_permissions(
146 146 self, repo_create, repo_create_write, user_group_create,
147 147 repo_group_create, fork_create, inherit_default_permissions,
148 148 expect_error, expect_form_error):
149 149 self.log_user()
150 150
151 151 params = {
152 152 'csrf_token': self.csrf_token,
153 153 'default_repo_create': repo_create,
154 154 'default_repo_create_on_write': repo_create_write,
155 155 'default_user_group_create': user_group_create,
156 156 'default_repo_group_create': repo_group_create,
157 157 'default_fork_create': fork_create,
158 158 'default_inherit_default_permissions': inherit_default_permissions
159 159 }
160 160 response = self.app.post(url('admin_permissions_global'),
161 161 params=params)
162 162 if expect_form_error:
163 163 assert response.status_int == 200
164 164 response.mustcontain('Value must be one of')
165 165 else:
166 166 if expect_error:
167 167 msg = 'Error occurred during update of permissions'
168 168 else:
169 169 msg = 'Global permissions updated successfully'
170 170 assert_session_flash(response, msg)
171 171
172 172 def test_index_ips(self):
173 173 self.log_user()
174 174 response = self.app.get(url('admin_permissions_ips'))
175 175 # TODO: Test response...
176 176 response.mustcontain('All IP addresses are allowed')
177 177
178 178 def test_add_delete_ips(self):
179 179 self.log_user()
180 180 clear_all_caches()
181 181
182 182 # ADD
183 183 default_user_id = User.get_default_user().user_id
184 184 response = self.app.post(
185 185 url('edit_user_ips', user_id=default_user_id),
186 186 params={'new_ip': '127.0.0.0/24', '_method': 'put',
187 187 'csrf_token': self.csrf_token})
188 188
189 189 response = self.app.get(url('admin_permissions_ips'))
190 190 response.mustcontain('127.0.0.0/24')
191 191 response.mustcontain('127.0.0.0 - 127.0.0.255')
192 192
193 193 # DELETE
194 194 default_user_id = User.get_default_user().user_id
195 195 del_ip_id = UserIpMap.query().filter(UserIpMap.user_id ==
196 196 default_user_id).first().ip_id
197 197
198 198 response = self.app.post(
199 199 url('edit_user_ips', user_id=default_user_id),
200 200 params={'_method': 'delete', 'del_ip_id': del_ip_id,
201 201 'csrf_token': self.csrf_token})
202
203 assert_session_flash(response, 'Removed ip address from user whitelist')
204
202 205 clear_all_caches()
203 206 response = self.app.get(url('admin_permissions_ips'))
204 207 response.mustcontain('All IP addresses are allowed')
205 208 response.mustcontain(no=['127.0.0.0/24'])
206 209 response.mustcontain(no=['127.0.0.0 - 127.0.0.255'])
207 210
208 211 def test_index_overview(self):
209 212 self.log_user()
210 213 self.app.get(url('admin_permissions_overview'))